code
stringlengths 1
199k
|
|---|
import unittest
from app.md5py import MD5
class TddInPythonExample(unittest.TestCase):
    """Checks the custom MD5 implementation against a known reference digest."""

    def test_object_program(self):
        # md5("1234") has a well-known digest; compare against it.
        digest_calculator = MD5()
        digest_calculator.update("1234")
        self.assertEqual("81dc9bdb52d04dc20036dbd8313ed055",
                         digest_calculator.hexdigest())
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
|
from django.db.models import Q
from django.forms.fields import CharField, MultiValueField
from django.forms.widgets import MultiWidget, TextInput
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from django_filters.filters import DateFilter, MethodFilter, ModelChoiceFilter
from rest_framework import serializers
from rest_framework.compat import django_filters
from rest_framework.filters import FilterSet
from sapl.base.models import Autor, TipoAutor
from sapl.parlamentares.models import Legislatura
from sapl.utils import generic_relations_for_model
class SaplGenericRelationSearchFilterSet(FilterSet):
    # Free-text filter: ?q=... searches `nome` plus every searchable field
    # of the generic relations registered for the model.
    q = MethodFilter()

    def filter_q(self, queryset, value):
        """Filter `queryset` by each whitespace-separated term in `value`.

        All terms must match (AND); within one term, `nome` and every
        generic-relation search field are OR-ed together.
        """
        query = value.split(' ')
        if query:
            q = Q()
            for qtext in query:
                if not qtext:
                    continue
                q_fs = Q(nome__icontains=qtext)

                # rebuilt per term; the last iteration's value is used below
                order_by = []

                for gr in generic_relations_for_model(self._meta.model):
                    sgr = gr[1]  # list of generic relations for this model
                    for item in sgr:
                        if item.related_model != self._meta.model:
                            continue
                        flag_order_by = True
                        for field in item.fields_search:
                            if flag_order_by:
                                flag_order_by = False
                                # order by the first searchable field only
                                order_by.append('%s__%s' % (
                                    item.related_query_name(),
                                    field[0])
                                )
                            # if len(field) == 3 and field[2](qtext) is not
                            # None:
                            # field = (name, lookup[, transform]); 3-tuples
                            # pass qtext through the transform callable.
                            q_fs = q_fs | Q(**{'%s__%s%s' % (
                                item.related_query_name(),
                                field[0],
                                field[1]): qtext if len(field) == 2
                                else field[2](qtext)})

                q = q & q_fs

        # An empty Q() is falsy, so no-op searches leave the queryset alone.
        if q:
            queryset = queryset.filter(q).order_by(*order_by)
        return queryset
class SearchForFieldWidget(MultiWidget):
    """Two side-by-side text inputs (field name / field value)."""

    def __init__(self, attrs=None):
        MultiWidget.__init__(self, (TextInput, TextInput), attrs)

    def decompress(self, value):
        # MultiWidget contract: split a compressed value into sub-values.
        return [None, None] if value is None else value
class SearchForFieldField(MultiValueField):
    """MultiValueField pairing two CharFields, rendered as SearchForFieldWidget."""

    widget = SearchForFieldWidget

    def __init__(self, *args, **kwargs):
        subfields = (CharField(), CharField())
        super(SearchForFieldField, self).__init__(subfields, *args, **kwargs)

    def compress(self, parameters):
        # Keep the raw [names, values] pair; an empty list becomes None.
        return parameters if parameters else None
class SearchForFieldFilter(django_filters.filters.MethodFilter):
    """MethodFilter whose form field is the two-part SearchForFieldField."""

    field_class = SearchForFieldField
class AutorChoiceFilterSet(SaplGenericRelationSearchFilterSet):
    """Autor filter supporting free text (q), a TipoAutor choice, and nome."""

    q = MethodFilter()
    tipo = ModelChoiceFilter(queryset=TipoAutor.objects.all())

    class Meta:
        model = Autor
        fields = ['q',
                  'tipo',
                  'nome', ]

    def filter_q(self, queryset, value):
        # Delegate to the generic search, then deduplicate/sort by nome.
        base = SaplGenericRelationSearchFilterSet.filter_q(self, queryset, value)
        return base.distinct('nome').order_by('nome')
class AutorSearchForFieldFilterSet(AutorChoiceFilterSet):
    """Autor filter whose q value is a (field-names CSV, field-values CSV) pair."""

    q = SearchForFieldFilter()

    class Meta(AutorChoiceFilterSet.Meta):
        pass

    def filter_q(self, queryset, value):
        # value is [keys_csv, values_csv]; zip them into filter kwargs.
        keys = value[0].split(',')
        values = value[1].split(',')
        value[0] = keys
        value[1] = values
        params = {}
        for key, raw in zip(keys, values):
            # Boolean values arrive as text; normalise to '1'/'0' lookups.
            if raw in ['True', 'False']:
                raw = '1' if raw == 'True' else '0'
            params[key] = raw
        return queryset.filter(**params).distinct('nome').order_by('nome')
class AutoresPossiveisFilterSet(FilterSet):
    """Restricts Autor rows to those whose underlying object (parlamentar,
    comissão, frente, bancada, bloco, órgão) is active at a reference date.

    `tipo` picks a TipoAutor; when its content_type model has a matching
    filter_<model>() method below, that method applies the date rules.
    """

    data_relativa = DateFilter(method='filter_data_relativa')
    tipo = MethodFilter()

    class Meta:
        model = Autor
        fields = ['data_relativa', 'tipo', ]

    def filter_data_relativa(self, queryset, name, value):
        # No-op: the date is consumed by the `qs` property below; this
        # method exists only so the form accepts the field.
        return queryset

    def filter_tipo(self, queryset, value):
        """Validate the TipoAutor pk and filter the queryset by it.

        Raises serializers.ValidationError for an unknown or malformed pk.
        """
        try:
            tipo = TipoAutor.objects.get(pk=value)
        except (TipoAutor.DoesNotExist, ValueError):
            # Narrowed from a bare `except:` so unrelated errors (e.g.
            # database failures) propagate instead of being masked.
            raise serializers.ValidationError(_('Tipo de Autor inexistente.'))
        return queryset.filter(tipo=tipo)

    @property
    def qs(self):
        qs = super().qs
        data_relativa = self.form.cleaned_data['data_relativa'] \
            if 'data_relativa' in self.form.cleaned_data else None
        tipo = self.form.cleaned_data['tipo'] \
            if 'tipo' in self.form.cleaned_data else None
        if not tipo:
            # Without a tipo there is nothing to restrict: a date alone is
            # meaningless here (see filter_data_relativa).  This also fixes
            # the original, which dereferenced `tipo` when only a date was
            # supplied.
            return qs
        # não precisa de try/except: já foi validado em filter_tipo
        tipo = TipoAutor.objects.get(pk=tipo)
        if not tipo.content_type:
            return qs
        filter_for_model = 'filter_%s' % tipo.content_type.model
        if not hasattr(self, filter_for_model):
            return qs
        if not data_relativa:
            data_relativa = timezone.now()
        return getattr(self, filter_for_model)(qs, data_relativa).distinct()

    def filter_parlamentar(self, queryset, data_relativa):
        # não leva em conta afastamentos
        legislatura_relativa = Legislatura.objects.filter(
            data_inicio__lte=data_relativa,
            data_fim__gte=data_relativa).first()
        # Mandate started before the date and either still open or ending
        # on/after the date.
        q = Q(
            parlamentar_set__mandato__data_inicio_mandato__lte=data_relativa,
            parlamentar_set__mandato__data_fim_mandato__isnull=True) | Q(
            parlamentar_set__mandato__data_inicio_mandato__lte=data_relativa,
            parlamentar_set__mandato__data_fim_mandato__gte=data_relativa)
        # first() returns None when no legislature covers the date; the
        # original crashed with AttributeError in that case.
        if legislatura_relativa and legislatura_relativa.atual():
            q = q & Q(parlamentar_set__ativo=True)
        return queryset.filter(q)

    def filter_comissao(self, queryset, data_relativa):
        # Commission created on/before the date and neither extinguished nor
        # finished before it.  The original listed the (extincao >= date,
        # fim isnull) clause twice; the duplicate was removed — the OR-ed
        # set of combinations is unchanged.
        return queryset.filter(
            Q(comissao_set__data_extincao__isnull=True,
              comissao_set__data_fim_comissao__isnull=True) |
            Q(comissao_set__data_extincao__gte=data_relativa,
              comissao_set__data_fim_comissao__isnull=True) |
            Q(comissao_set__data_extincao__isnull=True,
              comissao_set__data_fim_comissao__gte=data_relativa) |
            Q(comissao_set__data_extincao__gte=data_relativa,
              comissao_set__data_fim_comissao__gte=data_relativa),
            comissao_set__data_criacao__lte=data_relativa)

    def filter_frente(self, queryset, data_relativa):
        # Front created on/before the date and not yet extinguished.
        return queryset.filter(
            Q(frente_set__data_extincao__isnull=True) |
            Q(frente_set__data_extincao__gte=data_relativa),
            frente_set__data_criacao__lte=data_relativa)

    def filter_bancada(self, queryset, data_relativa):
        # Same rule as filter_frente, for bancadas.
        return queryset.filter(
            Q(bancada_set__data_extincao__isnull=True) |
            Q(bancada_set__data_extincao__gte=data_relativa),
            bancada_set__data_criacao__lte=data_relativa)

    def filter_bloco(self, queryset, data_relativa):
        # Same rule as filter_frente, for blocos.
        return queryset.filter(
            Q(bloco_set__data_extincao__isnull=True) |
            Q(bloco_set__data_extincao__gte=data_relativa),
            bloco_set__data_criacao__lte=data_relativa)

    def filter_orgao(self, queryset, data_relativa):
        # na implementação, não havia regras a implementar para orgao
        return queryset
|
import re
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
# (pattern, replacement) pairs applied in order to sanitize README text.
PYPI_RST_FILTERS = (
    # Replace code-blocks
    (r'\.\.\s? code-block::\s*(\w|\+)+', '::'),
    # Replace image
    (r'\.\.\s? image::.*', ''),
    # Remove travis ci badge
    (r'.*travis-ci\.org/.*', ''),
    # Remove pypip.in badges
    (r'.*pypip\.in/.*', ''),
    (r'.*crate\.io/.*', ''),
    (r'.*coveralls\.io/.*', ''),
)


def rst(filename):
    '''
    Load rst file and sanitize it for PyPI.

    Remove unsupported github tags:
     - code-block directive
     - image directive
     - travis ci / badge lines

    @param filename: path to the reStructuredText file
    @return: sanitized file content as a string
    '''
    # `with` guarantees the handle is closed; the original leaked it
    # (open(filename).read() with no close).
    with open(filename) as handle:
        content = handle.read()
    for regex, replacement in PYPI_RST_FILTERS:
        content = re.sub(regex, replacement, content)
    return content
def required(filename):
    """Return the requirement lines read from `filename` as a list."""
    with open(filename) as handle:
        return handle.read().splitlines()
# Package metadata: the long description is assembled from the sanitized
# README and changelog; runtime dependencies come from requirements/base.txt.
setup(
    name="serialkiller-plugins",
    version="0.0.2",
    description="Plugins for serialkiller project",
    long_description=rst('README.rst') + rst('CHANGELOG.txt'),
    author="Bruno Adelé",
    author_email="Bruno Adelé <bruno@adele.im>",
    url="https://github.com/badele/serialkiller-plugins",
    license="GPL",
    install_requires=required('requirements/base.txt'),
    setup_requires=[],
    tests_require=[
        'pep8',
        'coveralls'
    ],
    test_suite='tests',
    packages=find_packages(),
    include_package_data=True,
    zip_safe=False,
    scripts=[],
    classifiers=[
        'Programming Language :: Python',
    ],
)
|
# NOTE(review): Python 2 script (print statement; input() eval-parses the
# line, so T is expected to be an integer).
# Even T: prints T/2.  Odd T: prints ((T-1)/2)-T, a negative value --
# presumably intentional for the puzzle this solves; confirm against the
# problem statement.
T = input()
if T%2==0:
    print T/2
else:
    print ((T-1)/2)-T
|
"""
distutils setup script for distributing Timbl Tools
"""
__authors__ = "Erwin Marsi <e.marsi@gmail.com>"
from distutils.core import setup
from glob import glob
from os import walk, path, remove
from os.path import basename, isdir, join, exists
from shutil import rmtree
# Start from a clean slate: drop the stale distutils manifest and build dir.
if exists('MANIFEST'): remove('MANIFEST')
if exists("build"): rmtree("build")

name = "timbl-tools"
version = "0.5.0"

description = """Timbl Tools is a collection of Python modules and scripts for
working with TiMBL, the Tilburg Memory-based Learner."""

long_description = """
Timbl Tools is a collection of Python modules and scripts for working with
TiMBL, the Tilburg Memory-based Learner. It provides support for:
* creating Timbl servers and clients
* running (cross-validated) experiments
* lazy parsing of verbose Timbl ouput (e.g. NN distributions)
* down-sampling of instances
* writing ascii graphs of the feature weights
"""

# Package names: every directory under lib/ (with the "lib/" prefix
# stripped), skipping Subversion bookkeeping directories.
packages = [ root[4:]
             for (root, dirs, files) in walk("lib")
             if not ".svn" in root ]
def get_data_files(data_dir_prefix, dir):
    """Collect distutils data_files pairs for everything under `dir`.

    data_files is a sequence of (directory, files) pairs; each pair names
    an installation directory and the files to install there.  Compiled
    (.pyc) and editor-backup (~) files are skipped, and .svn directories
    are pruned from the walk.
    """
    collected = []
    for base, subdirs, files in walk(dir):
        install_dir = join(data_dir_prefix, base)
        wanted = [join(base, name) for name in files
                  if not name.endswith(".pyc") and not name.endswith("~")]
        collected.append((install_dir, wanted))
        if '.svn' in subdirs:
            subdirs.remove('.svn')  # ignore svn directories
    return collected
# Everything installs under share/<name>-<version>: the top-level docs plus
# the doc/ and data/ trees.
data_dir = join("share", "%s-%s" % (name, version))

data_files = [(data_dir, ['CHANGES', 'COPYING', 'INSTALL', 'README'])]
data_files += get_data_files(data_dir, "doc")
data_files += get_data_files(data_dir, "data")

# Produce zip and both tar flavours when building a source distribution.
sdist_options = dict(
    formats=["zip","gztar","bztar"])

setup(
    name = name,
    version = version,
    description = description,
    long_description = long_description,
    license = "GNU Public License v3",
    author = "Erwin Marsi",
    author_email = "e.marsi@gmail.com",
    url = "https://github.com/emsrc/timbl-tools",
    requires = ["networkx"],
    provides = ["tt (%s)" % version],
    package_dir = {"": "lib"},
    packages = packages,
    scripts = glob(join("bin","*.py")),
    data_files = data_files,
    platforms = "POSIX, Mac OS X, MS Windows",
    keywords = [
        "TiMBL"],
    classifiers = [
        "Development Status :: 4 - Beta",
        "Intended Audience :: Science/Research",
        "License :: OSI Approved :: GNU Public License (GPL)",
        "Operating System :: OS Independent",
        "Programming Language :: Python",
        "Topic :: Scientific/Engineering :: Artificial Intelligence",
        "Natural Language :: English"
    ],
    options = dict(sdist=sdist_options)
)
|
from setuptools import setup, find_packages
import sys, os
# Release version of the raisin.restyler distribution.
version = '1.3'

long_description = """The raisin.restyler package is a part of Raisin, the web application
used for publishing the summary statistics of Grape, a pipeline used for processing and
analyzing RNA-Seq data."""

# setuptools metadata: ships Zope page templates (templates/*.pt) and
# participates in the `raisin` namespace package.
setup(name='raisin.restyler',
      version=version,
      description="A package used in the Raisin web application",
      long_description=long_description,
      classifiers=[
          'Development Status :: 5 - Production/Stable',
          'Programming Language :: Python',
          'Intended Audience :: Developers',
          'Operating System :: OS Independent',
          'License :: OSI Approved :: GNU General Public License (GPL)',
          'Natural Language :: English',
          'Topic :: Software Development :: Libraries :: Python Modules',
          'Operating System :: POSIX :: Linux'],
      keywords='RNA-Seq pipeline ngs transcriptome bioinformatics ETL',
      author='Maik Roder',
      author_email='maikroeder@gmail.com',
      url='http://big.crg.cat/services/grape',
      license='GPL',
      packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
      namespace_packages = ['raisin'],
      package_data = {'raisin.restyler':['templates/*.pt']},
      include_package_data=True,
      zip_safe=False,
      install_requires=[
          # -*- Extra requirements: -*-
          'configobj',
          'zope.pagetemplate'
      ],
      entry_points="""
# -*- Entry points: -*-
""",
      )
|
import sys
from functools import partial
from uuid import UUID
from hashlib import sha1
from os import path, listdir
from zipfile import ZipFile
from subprocess import Popen, TimeoutExpired
import nacl.utils
import nacl.secret
def isValidUUID(uid):
    """
    Validate a version-4 UUID.

    @param uid: UUID value to be verified, can be bytes or str
    @return: True if uid is a canonical v4 UUID string, else False
    """
    try:
        uid = uid.decode('ascii')
    except UnicodeDecodeError:
        # non-ascii characters can never appear in a valid UUID
        return False
    except AttributeError:
        # already a str (no .decode) -- nothing to convert
        pass
    try:
        parsed = UUID(uid, version=4)
    except ValueError:
        return False
    # UUID() is lenient about input and forces the version bits, so
    # round-trip the canonical form to verify the input exactly.
    return str(parsed) == uid
def encrypt(safe, *args):
    """
    Encrypt all provided data

    @param safe: encryption box providing an encrypt(data, nonce) method
    @param args: data items to be encrypted
    @return: generator yielding one encrypted blob per argument
    """
    def _seal(payload):
        # fresh random nonce for every message
        return safe.encrypt(
            payload, nacl.utils.random(nacl.secret.SecretBox.NONCE_SIZE))

    return (_seal(payload) for payload in args)
def sha1sum(filePath, blocksize=1024):
    """
    Calculate SHA1 hash of file

    @param filePath: Path to hashable file
    @param blocksize: Amount of bytes to read into memory before hashing
    @return: SHA1 hex digest as ascii bytes
    """
    digest = sha1()
    with open(filePath, mode='rb') as stream:
        while True:
            chunk = stream.read(blocksize)
            if not chunk:
                break
            digest.update(chunk)
    return bytes(digest.hexdigest(), encoding='ascii')
def checkCerts():
    """
    Checks to see if required TLS certificates exist in Resources directory.
    Attempts to generate certificates if not found.

    On linux/darwin a shell script does the generation; on Windows an
    OpenSSL distribution is located (or unzipped) first, then a batch file
    is run.  Generation is given 180 seconds before being killed.

    @returns: Boolean value based on success
    """
    resDir = absolutePath('Resources')
    command = None
    success = False
    # check to see if required certificates exist
    if not all(True if path.isfile(path.join(resDir, cert)) else False for cert in ('server.crt', 'server.key.orig')):
        ############
        # Check OS
        ############
        if sys.platform in ('linux', 'darwin'):
            # bash script run
            command = 'sh {}'.format('create_certs_linux.sh')
        elif sys.platform == 'win32':
            hasOpenSSL = False
            # check for openssl requirement (downloaded during installer run)
            # NOTE(review): path.isdir(f) tests f relative to the CWD, not
            # resDir -- confirm this is intended.
            files = sorted((path.isdir(f), f) for f in listdir(resDir) if f.lower().startswith('openssl-'))
            # check for expanded directory and executable
            for isDir, ofile in files:
                if isDir and path.isfile(path.join(resDir, ofile, 'openssl.exe')):
                    hasOpenSSL = True
                    newDir = ofile
                    break
            if not hasOpenSSL and files:
                # sorted filename to list newest version first)
                for ofile in sorted(f for isDir, f in files if not isDir and path.splitext(f)[1] == '.zip'):
                    # extract archive
                    with ZipFile(path.join(resDir, ofile), 'r') as ozip:
                        newDir = path.join(resDir, path.splitext(ofile)[0])
                        ozip.extractall(path=newDir)
                    # verify openssl.exe exists in directory
                    if path.isfile(path.join(newDir, 'openssl.exe')):
                        hasOpenSSL = True
                        break
            if hasOpenSSL:
                # write openssl directory to config file
                with open(path.join(resDir, 'openssl.cfg'), 'w') as config:
                    config.writelines([newDir])
                # windows bat command file
                command = r'cmd /c {}'.format('create_certs_windows.bat')
        if command:
            proc = Popen([command], cwd=resDir, shell=True)
            try:
                # allow up to 3 minutes for certificate generation
                proc.wait(180)
            except TimeoutExpired:
                proc.kill()
            # check command has generated correct files
            if all(True if path.isfile(path.join(resDir, cert)) else False for cert in ('server.crt', 'server.key.orig')):
                success = True
    else:
        # both certificates already present; nothing to do
        success = True
    return success
def absolutePath(pathname):
    """
    Return the absolute path of the given file or directory

    @return: absolute path
    """
    frozen = getattr(sys, 'frozen', False)
    if frozen:
        # Frozen (packaged) application: data lives beside the executable.
        base = path.dirname(sys.executable)
    else:
        # Source installation: parent directory of this module's directory.
        base = path.join(path.dirname(__file__), path.pardir)
    return path.abspath(path.join(base, pathname))
|
'''
' configurationGui.py
' Author: Iker Pedrosa
'
' License:
' This file is part of orderedFileCopy.
'
' orderedFileCopy is free software: you can redistribute it and/or modify
' it under the terms of the GNU General Public License as published by
' the Free Software Foundation, either version 3 of the License, or
' (at your option) any later version.
'
' orderedFileCopy is distributed in the hope that it will be useful,
' but WITHOUT ANY WARRANTY; without even the implied warranty of
' MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
' GNU General Public License for more details.
'
' You should have received a copy of the GNU General Public License
' along with orderedFileCopy. If not, see <http://www.gnu.org/licenses/>.
'
'''
from Tkinter import *
from fileManager import *
import tkFileDialog
import globals
class configurationGUI:
    """Modal Tkinter window (Python 2) for editing orderedFileCopy's
    configuration: file extension, default origin folder and the
    USB-destination option.
    """

    def __init__(self, master):
        # Make the window modal while it is open.
        master.grab_set()
        #The contrary is master.grab_release()
        #Window title
        self.master = master
        master.title("Configuration menu")
        #Window position and size
        windowWidth = 600
        windowHeight = 150
        screenWidth = master.winfo_screenwidth()
        screenHeight = master.winfo_screenheight()
        print("configurationGui: screenWidth %d" % screenWidth)
        print("configurationGui: screenHeight %d" % screenHeight)
        # centre horizontally; sit one window-height above vertical centre
        windowWidthPosition = (screenWidth - windowWidth) / 2
        windowHeightPosition = ((screenHeight - windowHeight) / 2) - windowHeight
        print("configurationGui: windowWidthPosition %d" % windowWidthPosition)
        print("configurationGui: windowHeightPosition %d" % windowHeightPosition)
        master.geometry("%dx%d+%d+%d" % (windowWidth, windowHeight, windowWidthPosition, windowHeightPosition))
        #Create layouts
        top_frame = Frame(master, width = 600, height = 50)
        centre_frame = Frame(master, width = 600, height = 50)
        below_frame = Frame(master, width = 600, height = 50)
        bottom_frame = Frame(master, width = 600, height = 50)
        top_frame.grid(row = 0)
        centre_frame.grid(row = 1)
        below_frame.grid(row = 2)
        bottom_frame.grid(row = 3)
        #Extension information
        self.labelExtension = Label(top_frame, height = 1, width = 30, font = ("Helvetica", 11), text = "File extension to copy:")
        self.labelExtension.grid(row = 0, column = 0)
        self.textExtension = Text(top_frame, height = 1, width = 5, font = ("Helvetica", 11))
        self.textExtension.grid(row = 0, column = 1)
        self.textExtension.insert(END, globals.extension)
        #Default origin information
        globals.windowDefaultOrigin = StringVar()
        globals.windowDefaultOrigin.set(globals.selectedDefaultOrigin)
        self.textDefaultOriginPath = Entry(centre_frame, width = 55, font = ("Helvetica", 11), textvariable = globals.windowDefaultOrigin)
        self.textDefaultOriginPath.grid(row = 1, column = 0)
        self.buttonDefaultOriginPath = Button(centre_frame, text = "...", command = self.defaultOriginFileChooser)
        self.buttonDefaultOriginPath.grid(row = 1, column = 1, padx = 10)
        #Destination by USB information
        self.labelUsb = Label(below_frame, width = 15, font = ("Helvetica", 11), text = "Destination by USB")
        self.labelUsb.grid(row = 0, column = 0)
        self.localUsbState = IntVar()
        self.localUsbState.set(globals.selectedUsbState)
        self.checkboxUsb = Checkbutton(below_frame, command = self.activateUsbName, variable = self.localUsbState, onvalue=1, offvalue=0)
        self.checkboxUsb.grid(row = 0, column = 1)
        self.textUsb = Text(below_frame, height = 1, width = 25, font = ("Helvetica", 11), state = "disabled")
        self.textUsb.grid(row = 0, column = 2)
        # the USB-name box is editable only while the checkbox is ticked
        if globals.selectedUsbState == 1:
            self.textUsb.configure(state = "normal")
        else:
            self.textUsb.configure(state = "disabled")
        self.textUsb.insert(END, globals.selectedUsbName)
        #Buttons
        self.buttonAccept = Button(bottom_frame, text = "Accept", command = self.accept)
        self.buttonAccept.grid(row = 2, column = 0, padx = 25, pady = 20)
        self.buttonCancel = Button(bottom_frame, text = "Cancel", command = self.cancel)
        self.buttonCancel.grid(row = 2, column = 1, padx = 25, pady = 20)
        #Finished __init__

    def defaultOriginFileChooser(self):
        """Directory chooser for the default origin path; ignores cancels."""
        resultPath = tkFileDialog.askdirectory(initialdir = globals.selectedDefaultOrigin) + "/"
        # askdirectory returns "" on cancel, which becomes "/" after concat
        if resultPath != "/" and resultPath != "":
            globals.selectedDefaultOrigin = resultPath.encode("utf-8")
            globals.windowDefaultOrigin.set(globals.selectedDefaultOrigin)
        #Finished originFileChooser

    def accept(self):
        """Persist the edited values to the config file and close."""
        globals.extension = self.textExtension.get("1.0", "end-1c")
        globals.selectedUsbName = self.textUsb.get("1.0", "end-1c")
        writeConfiguration()
        print("accept: globals.selectedDefaultOrigin '%s'" % globals.selectedDefaultOrigin)
        print("accept: globals.extension '%s'" % globals.extension)
        self.master.destroy()
        #Finished accept

    def activateUsbName(self):
        """Checkbox callback: enable/disable the USB-name text box."""
        if self.localUsbState.get() == 1:
            globals.selectedUsbState = 1
            self.textUsb.configure(state = "normal")
            self.textUsb.insert(END, globals.selectedUsbName)
        else:
            globals.selectedUsbState = 0
            self.textUsb.delete("1.0", END)
            self.textUsb.configure(state = "disabled")
        #Finished activateUsbName

    def cancel(self):
        """Close the dialog without saving."""
        self.master.destroy()
        #Finished cancel
|
class Shape:
    """Abstract product: a drawable shape."""

    def __init__(self):
        pass

    def draw(self):
        # Overridden by concrete shapes.
        pass
class Circle(Shape):
    """Concrete shape that announces itself when drawn."""

    def draw(self):
        message = "Inside Circle::draw() method."
        print(message)
class Rectangle(Shape):
    """Concrete shape that announces itself when drawn."""

    def draw(self):
        message = "Inside Rectangle::draw() method."
        print(message)
class Square(Shape):
    """Concrete shape that announces itself when drawn."""

    def draw(self):
        message = "Inside Square::draw() method."
        print(message)
class Color:
    """Abstract product: a fillable colour."""

    def __init__(self):
        pass

    def fill(self):
        # Overridden by concrete colours.
        pass
class Blue(Color):
    """Concrete colour that announces itself when filled."""

    def fill(self):
        message = "Inside Blue::fill() method."
        print(message)
class Green(Color):
    """Concrete colour that announces itself when filled."""

    def fill(self):
        message = "Inside Green::fill() method."
        print(message)
class Red(Color):
    """Concrete colour that announces itself when filled."""

    def fill(self):
        message = "Inside Red::fill() method."
        print(message)
class AbstractFactory:
    """Interface for factories that can produce shapes and colours."""

    def __init__(self):
        pass

    def getShape(self, shapeType):
        # Default: this factory produces no shapes.
        return None

    def getColor(self, colorType):
        # Default: this factory produces no colours.
        return None
class ColorFactory(AbstractFactory):
    """Factory producing Color instances from a colour name."""

    def getColor(self, colorType):
        if not colorType:
            return None
        constructors = {'BLUE': Blue, 'GREEN': Green, 'RED': Red}
        maker = constructors.get(colorType)
        return maker() if maker else None

    def getShape(self, shapeType):
        # Shapes are handled by ShapeFactory.
        return None
class ShapeFactory(AbstractFactory):
    """Factory producing Shape instances from a shape name."""

    def getShape(self, shapeType):
        if not shapeType:
            return None
        constructors = {'CIRCLE': Circle, 'RECTANGLE': Rectangle, 'SQUARE': Square}
        maker = constructors.get(shapeType)
        return maker() if maker else None

    def getColor(self, colorType):
        # Colours are handled by ColorFactory.
        return None
class FactoryProducer:
    """Entry point that hands out the factory for a product family."""

    def getFactory(self, choice):
        if choice == 'SHAPE':
            return ShapeFactory()
        if choice == 'COLOR':
            return ColorFactory()
        return None
class AbstractFactoryPatternDemo:
    """Drives both factories over every known shape and colour name."""

    def __init__(self):
        producer = FactoryProducer()
        self.shapeFactory = producer.getFactory("SHAPE")
        self.colorFactory = producer.getFactory("COLOR")
        self.shape_list = ["CIRCLE", "RECTANGLE", "SQUARE"]
        self.color_list = ["BLUE", "GREEN", "RED"]

    def run(self):
        # Draw every shape, then fill every colour.
        for shape_name in self.shape_list:
            shape = self.shapeFactory.getShape(shape_name)
            shape.draw()
        for color_name in self.color_list:
            color1 = self.colorFactory.getColor(color_name)
            color1.fill()
# Demo entry point: instantiate and exercise both factories.
A = AbstractFactoryPatternDemo()
A.run()
|
import mutagen
import os
import re
import sys
from optparse import OptionParser
music_file_exts = ['.mp3', '.wav', '.ogg']
seconds_re = re.compile('(\d+)(\.\d+)? seconds')
def main(argv):
    """Parse CLI arguments and write the requested playlist(s) to disk."""
    options, args = build_parser().parse_args(argv)
    validate_options(options)
    print('playlist(s) will be written to ', options.outdir)
    if options.contains or options.regex:
        predicates = build_match_predicates(options.contains, options.regex)
        playlists = [build_playlist(options.start_at, predicates, options.extended,
                                    options.absolute, options.depth, options.name)]
    else:
        # No matching criteria given: default to per-year top-10 playlists.
        playlists = build_top_10_playlists(options.start_at, [], options.extended,
                                           options.absolute, options.depth)
    write_playlists(playlists, options.outdir.rstrip(os.path.sep))
def build_match_predicates(contains, regex):
    """Build predicates over metadata dicts for the given match options.

    Each predicate checks the file name, title and artist fields.
    """
    predicates = []
    if contains:
        needle = contains.lower()

        def contains_pred(meta, needle=needle):
            # case-insensitive substring match
            return (needle in os.path.basename(meta['path']).lower()
                    or needle in meta['title'].lower()
                    or needle in meta['artist'].lower())

        predicates.append(contains_pred)
    if regex:
        pattern = re.compile(regex)

        def regex_pred(meta, pattern=pattern):
            # truthy match object (or None) — consumed via truthiness
            return (re.search(pattern, os.path.basename(meta['path']))
                    or re.search(pattern, meta['title'])
                    or re.search(pattern, meta['artist']))

        predicates.append(regex_pred)
    return predicates
def build_parser():
    """Create the command-line option parser for the playlist generator."""
    option_specs = [
        (('-n', '--name'),
         dict(dest='name', default=os.path.basename(os.getcwd()),
              help='NAME of playlist', metavar='NAME')),
        (('-s', '--start-at'),
         dict(dest='start_at', default=os.getcwd(),
              help='DIR location to start media file search from (default is current DIR)',
              metavar='DIR')),
        (('-e', '--extended'),
         dict(dest='extended', action='store_true', default=False,
              help='use m3u extended format (has additional media metadata)')),
        (('-a', '--absolute'),
         dict(dest='absolute', action='store_true', default=False,
              help='use absolute file paths (default is relative paths)')),
        (('-d', '--depth'),
         dict(dest='depth', type="int", default=-1,
              help='depth to search, 0 for target dir only (default is fully recursive)')),
        (('-o', '--outdir'),
         dict(dest='outdir', default=os.getcwd(),
              help='DIR location of output file(s) (default is current DIR)',
              metavar='DIR')),
        (('-c', '--contains'),
         dict(dest='contains', default=None,
              help='case insensitive match on given string, i.e. "string contains SUBSTR". ' +
                   'Checks file names and metadata.', metavar='SUBSTR')),
        (('-r', '--regex'),
         dict(dest='regex', default=None,
              help='regex match. checks file name and metadata',
              metavar='EXP')),
        (('-f', '--force'),
         dict(dest='force', default=False, action='store_true',
              help='force execution through warnings')),
    ]
    parser = OptionParser()
    for flags, kwargs in option_specs:
        parser.add_option(*flags, **kwargs)
    return parser
def validate_options(options):
    """Sanity-check parsed CLI options; print a message and exit(1) on error.

    Checks that outdir and start_at exist, that the depth is sensible, and
    that an existing playlist is only overwritten with --force.
    """
    if not os.path.isdir(options.outdir):
        print('output directory does not exist!')
        sys.exit(1)
    if not os.path.isdir(options.start_at):
        print('starting directory does not exist!')
        sys.exit(1)
    # BUG FIX: the original used `options.depth != -1`, which rejected every
    # user-supplied depth and made the -d option unusable.  Only depths
    # below the "fully recursive" sentinel (-1) are invalid.
    if options.depth < -1:
        print('invalid depth: ' + str(options.depth))
        sys.exit(1)
    out_name = options.name if options.name.endswith('.m3u') else options.name + '.m3u'
    if os.path.exists(os.path.join(options.outdir, out_name)):
        if options.force:
            print('overwriting playlist: ' + options.name)
        else:
            print('playlist already exists with name: ' + options.name)
            print('run with option -f to overwrite existing playlist')
            sys.exit(1)
class Playlist:
    """Accumulates matched media items and renders m3u output lines."""

    def __init__(self, path, extended, absolute, name):
        self.items = []        # metadata dicts accepted into this playlist
        self.predicates = []
        self.path = path
        self.isExtended = extended   # extended m3u (#EXTINF metadata lines)
        self.isAbsolute = absolute   # absolute vs outdir-relative item paths
        self.name = name if name.endswith('.m3u') else name + '.m3u'

    def __str__(self):
        return self.name + ' items: ' + str(len(self.items))

    def get_out_str(self, item, outdir):
        """Render one playlist entry for `item`.

        In relative mode the longest common character prefix with `outdir`
        is stripped from the item path; in absolute mode (or when no prefix
        is shared) the full path is kept.
        """
        x = 0
        if not self.isAbsolute:
            # length of the character prefix shared with outdir
            while x < len(outdir) and x < len(item['path']) \
                    and outdir[x] == item['path'][x]:
                x += 1
        if x == 0:
            # absolute mode or no shared prefix: keep the full path
            x = -1
        if self.isExtended:
            return '\n' + '#EXTINF:' + item['seconds'] + ', ' + item['artist'] + ' - ' + item['title'] \
                + '\n' + item['path'][x + 1:]
        else:
            return '\n' + item['path'][x + 1:]
def write_playlists(playlists, outdir):
    """Write each playlist to an m3u file inside `outdir`."""
    for playlist in playlists:
        print('writing playlist: ' + str(playlist))
        header = '#EXTM3U' if playlist.isExtended else '#STDM3U'
        with open(os.path.join(outdir, playlist.name), mode='w') as out:
            out.write(header)
            for item in playlist.items:
                out.write(playlist.get_out_str(item, outdir))
def all_pass(x, predicates):
    """Return True iff every predicate accepts `x` (vacuously True if none)."""
    return all(p(x) for p in predicates)
def extract_metadata(path, extended=False):
    """Collect path/title/artist/seconds metadata for a music file.

    Without `extended`, tags are not read: the title falls back to the file
    name and the artist to the parent directory name.
    """
    meta = {'path': path, 'title': '', 'artist': '', 'seconds': '0'}
    if extended:
        tags = mutagen.File(path)
        if tags:
            # pull the duration out of mutagen's pretty-printed info
            found = re.search(seconds_re, tags.info.pprint())
            meta['seconds'] = found.group(1) if found else '0'
    else:
        tags = {}
    meta['title'] = tags.get('title', [os.path.basename(path)])[0]
    meta['artist'] = tags.get('artist', [path.split(os.path.sep)[-2]])[0]
    return meta
def build_top_10_playlists(root_path, predicates, extended, absolute, depth):
    """Build one playlist per year directory, keeping tracks ranked 01-10.

    Expects files named like "MM_NN title" where NN is the chart position,
    inside directories named for a four-digit year.
    """
    predicates.append(
        lambda x: re.search('^\d{2}_\d{2} ', os.path.basename(x['path']))
    )
    predicates.append(
        lambda x: int(os.path.basename(x['path'])[3:5]) < 11
    )
    playlists = []
    for entry in os.listdir(root_path):
        year_dir = os.path.join(root_path, entry)
        # only directories named like a plausible four-digit year
        if not os.path.isdir(year_dir):
            continue
        if not re.search('^\d{4}$', entry) or not (2100 > int(entry) > 1900):
            continue
        playlists.append(build_playlist(year_dir, predicates,
                                        extended, absolute,
                                        0, os.path.basename(year_dir)))
    return playlists
def build_playlist(root_path, predicates, extended, absolute, depth, name):
    """Walk `root_path` and collect matching music files into a Playlist."""
    playlist = Playlist(root_path, extended, absolute, name)
    for current_dir, _dirs, filenames in os.walk(root_path):
        for filename in filenames:
            full_path = os.path.join(current_dir, filename)
            if os.path.splitext(full_path)[1].lower() not in music_file_exts:
                continue
            meta = extract_metadata(full_path, extended)
            if all_pass(meta, predicates):
                playlist.items.append(meta)
    return playlist
# Script entry point.
if __name__ == "__main__":
    main(sys.argv[1:])
|
"""Configuration for a load testing using Locust.
To start load testing, run `make server` and `make test-load`.
"""
import random
from json.decoder import JSONDecodeError
from django.urls import reverse
from locust import HttpLocust, TaskSet, task
class SolvingTaskBehavior(TaskSet):
    """Describes interaction of a simulated user with a single task.

    The user requests a randomly chosen task, then does a random number of
    edits and unsuccessful executions, and finally solves the task.
    """
    # Chance that any given run_program call reports success and ends
    # this task session.
    SOLVE_PROBABILITY = 0.3

    def on_start(self):
        # Pick a random task from those fetched by the parent TaskSet.
        selected_task = random.choice(self.parent.task_names)
        self.start_task(selected_task)

    def start_task(self, task_name):
        """Open a task session on the server and perform an initial edit."""
        url = self.parent.action_urls['start_task']
        data = {'task': task_name}
        response = self.parent.post_with_cookies(url, data)
        self.task_session_id = response.json()['task_session_id']
        self.edit_program()

    @task(1)
    def run_program(self):
        """Simulate an execution; with some probability it solves the task."""
        url = self.parent.action_urls['run_program']
        solved = random.random() < self.SOLVE_PROBABILITY
        data = {
            'task-session-id': self.task_session_id,
            'program': 'f',
            'correct': solved}
        self.parent.post_with_cookies(url, data)
        if solved:
            # Leave this TaskSet and return control to the parent.
            self.interrupt()

    @task(5)
    def edit_program(self):
        """Simulate saving a program edit (weighted 5x vs. runs)."""
        url = self.parent.action_urls['edit_program']
        data = {
            'task-session-id': self.task_session_id,
            'program': 'f'}
        self.parent.post_with_cookies(url, data)
class UserBehavior(TaskSet):
    """Describes interaction of a simulated user with the server.
    """
    tasks = [SolvingTaskBehavior]

    def __init__(self, parent):
        super().__init__(parent)
        self.cookies = {}        # cookie jar shared across requests
        self.action_urls = {}    # entity-action URLs discovered via the API
        self.task_names = None   # available task names, fetched on start

    def on_start(self):
        """Fill in cookies so that post request can be made later.
        """
        response = self.visit_homepage()
        self.save_cookies(response)
        self.save_tasks()
        self.save_action_urls()

    def visit_homepage(self):
        # Initial GET; this also sets the CSRF cookie.
        response = self.client.get('/')
        return response

    def save_tasks(self):
        """Fetch the list of available tasks and remember their names."""
        response = self.client.get('/learn/api/tasks/')
        self.save_cookies(response)
        self.task_names = [task['name'] for task in response.json()]

    def save_action_urls(self):
        """The session and lazy user is created. Now tasks can be solved.
        """
        user_response = self.client.get('/learn/api/users/current')
        self.save_cookies(user_response)
        student_url = user_response.json()['student']
        student_response = self.client.get(student_url)
        self.save_cookies(student_response)
        self.action_urls['start_task'] = student_response.json()['start_task']
        self.action_urls['edit_program'] = student_response.json()['edit_program']
        self.action_urls['run_program'] = student_response.json()['run_program']

    def save_cookies(self, response):
        """Stores cookies for later usage.
        """
        self.cookies.update(response.cookies.get_dict())

    def post_with_cookies(self, url, data):
        """Post request with correctly set cookies and headers.
        """
        csrf_token = self.cookies['csrftoken']
        data['csrfmiddlewaretoken'] = csrf_token
        headers = {'X-CSRFToken': csrf_token, 'Referer': self.client.base_url}
        response = self.client.post(url, data, headers=headers, cookies=self.cookies)
        self.save_cookies(response)
        self.log_errors(response)
        return response

    @staticmethod
    def log_errors(response):
        # Append failed responses to a local log for post-run inspection.
        if not response.ok:
            with open('request_errors.log', 'a') as f:
                f.writelines(response.text)
class WebsiteUser(HttpLocust):
    # Locust entry point: each simulated user runs UserBehavior and waits
    # 0.5-5 seconds between tasks.
    task_set = UserBehavior
    min_wait = 500
    max_wait = 5000
|
"""
Module implementing templates for the documentation generator (lists style).
"""
from __future__ import unicode_literals
# HTML templates for the documentation generator's "lists" style.
# Single-brace fields (e.g. {BodyBgColor}) are presumably filled with colour
# values in a first str.format pass, while double-brace fields (e.g.
# {{Title}}) survive that pass and are substituted with content later --
# inferred from the brace nesting; TODO confirm against the generator code.

# Document shell: opening <html> ... <body> and the matching closer.
headerTemplate = \
    '''<!DOCTYPE html>
<html><head>
<title>{{Title}}</title>
<meta charset="UTF-8">
</head>
<body style="background-color:{BodyBgColor};color:{BodyColor}">'''

footerTemplate = '''
</body></html>'''

# Page body for a Python module.
moduleTemplate = \
    '''<a NAME="top" ID="top"></a>
<h1 style="background-color:{Level1HeaderBgColor};color:{Level1HeaderColor}">
{{Module}}</h1>
{{ModuleDescription}}
<h3 style="background-color:{Level2HeaderBgColor};color:{Level2HeaderColor}">
Global Attributes</h3>
{{GlobalsList}}
<h3 style="background-color:{Level2HeaderBgColor};color:{Level2HeaderColor}">
Classes</h3>
{{ClassList}}
<h3 style="background-color:{Level2HeaderBgColor};color:{Level2HeaderColor}">
Functions</h3>
{{FunctionList}}
<hr />'''

# Page body for a Ruby file (adds a Modules section).
rbFileTemplate = \
    '''<a NAME="top" ID="top"></a>
<h1 style="background-color:{Level1HeaderBgColor};color:{Level1HeaderColor}">
{{Module}}</h1>
{{ModuleDescription}}
<h3 style="background-color:{Level2HeaderBgColor};color:{Level2HeaderColor}">
Global Attributes</h3>
{{GlobalsList}}
<h3 style="background-color:{Level2HeaderBgColor};color:{Level2HeaderColor}">
Classes</h3>
{{ClassList}}
<h3 style="background-color:{Level2HeaderBgColor};color:{Level2HeaderColor}">
Modules</h3>
{{RbModulesList}}
<h3 style="background-color:{Level2HeaderBgColor};color:{Level2HeaderColor}">
Functions</h3>
{{FunctionList}}
<hr />'''

# Section describing a single class.
classTemplate = \
    '''<hr />
<a NAME="{{Anchor}}" ID="{{Anchor}}"></a>
<h2 style="background-color:{CFBgColor};color:{CFColor}">{{Class}}</h2>
{{ClassDescription}}
<h3 style="background-color:{Level2HeaderBgColor};color:{Level2HeaderColor}">
Derived from</h3>
{{ClassSuper}}
<h3 style="background-color:{Level2HeaderBgColor};color:{Level2HeaderColor}">
Class Attributes</h3>
{{GlobalsList}}
<h3 style="background-color:{Level2HeaderBgColor};color:{Level2HeaderColor}">
Class Methods</h3>
{{ClassMethodList}}
<h3 style="background-color:{Level2HeaderBgColor};color:{Level2HeaderColor}">
Methods</h3>
{{MethodList}}
<h3 style="background-color:{Level2HeaderBgColor};color:{Level2HeaderColor}">
Static Methods</h3>
{{StaticMethodList}}
{{MethodDetails}}
<div align="right"><a style="color:{LinkColor}" href="#top">Up</a></div>
<hr />'''

# Detail entries for a method and for a class constructor.
methodTemplate = \
    '''<a NAME="{{Anchor}}.{{Method}}" ID="{{Anchor}}.{{Method}}"></a>
<h3 style="background-color:{Level2HeaderBgColor};color:{Level2HeaderColor}">
{{Class}}.{{Method}}{{MethodClassifier}}</h3>
<b>{{Method}}</b>(<i>{{Params}}</i>)
{{MethodDescription}}'''

constructorTemplate = \
    '''<a NAME="{{Anchor}}.{{Method}}" ID="{{Anchor}}.{{Method}}"></a>
<h3 style="background-color:{Level2HeaderBgColor};color:{Level2HeaderColor}">
{{Class}} (Constructor)</h3>
<b>{{Class}}</b>(<i>{{Params}}</i>)
{{MethodDescription}}'''

# Section describing a Ruby module and a class inside a Ruby module.
rbModuleTemplate = \
    '''<hr />
<a NAME="{{Anchor}}" ID="{{Anchor}}"></a>
<h2 style="background-color:{CFBgColor};color:{CFColor}">{{Module}}</h2>
{{ModuleDescription}}
<h3 style="background-color:{Level2HeaderBgColor};color:{Level2HeaderColor}">
Module Attributes</h3>
{{GlobalsList}}
<h3 style="background-color:{Level2HeaderBgColor};color:{Level2HeaderColor}">
Classes</h3>
{{ClassesList}}
<h3 style="background-color:{Level2HeaderBgColor};color:{Level2HeaderColor}">
Functions</h3>
{{FunctionsList}}
<hr />
{{ClassesDetails}}
{{FunctionsDetails}}
<div align="right"><a style="color:{LinkColor}" href="#top">Up</a></div>
<hr />'''

rbModulesClassTemplate = \
    '''<a NAME="{{Anchor}}" ID="{{Anchor}}"></a>
<h2 style="background-color:{CFBgColor};color:{CFColor}">{{Class}}</h2>
{{ClassDescription}}
<h3 style="background-color:{Level2HeaderBgColor};color:{Level2HeaderColor}">
Derived from</h3>
{{ClassSuper}}
<h3 style="background-color:{Level2HeaderBgColor};color:{Level2HeaderColor}">
Methods</h3>
{{MethodList}}
{{MethodDetails}}
<div align="right"><a style="color:{LinkColor}" href="#top">Up</a></div>
<hr />'''

# Section describing a module-level function.
functionTemplate = \
    '''<hr />
<a NAME="{{Anchor}}" ID="{{Anchor}}"></a>
<h2 style="background-color:{CFBgColor};color:{CFColor}">{{Function}}</h2>
<b>{{Function}}</b>(<i>{{Params}}</i>)
{{FunctionDescription}}
<div align="right"><a style="color:{LinkColor}" href="#top">Up</a></div>
<hr />'''

# Generic name/description tables and their row variants.
listTemplate = \
    '''<table>
{{Entries}}
</table>'''

listEntryTemplate = \
    '''<tr>
<td><a style="color:{LinkColor}" href="#{{Link}}">{{Name}}</a></td>
<td>{{Deprecated}}{{Description}}</td>
</tr>'''

listEntryNoneTemplate = '''<tr><td>None</td></tr>'''

listEntryDeprecatedTemplate = '''<b>Deprecated.</b>'''

listEntrySimpleTemplate = '''<tr><td>{{Name}}</td></tr>'''

paragraphTemplate = \
    '''<p>
{{Lines}}
</p>'''

# Parameter description lists (with and without type information).
parametersListTemplate = \
    '''<dl>
{{Parameters}}
</dl>'''

parametersListEntryTemplate = \
    '''<dt><i>{{Name}}</i></dt>
<dd>
{{Description}}
</dd>'''

parameterTypesListEntryTemplate = \
    '''<dt><i>{{Name}}</i> ({{Type}})</dt>
<dd>
{{Description}}
</dd>'''

# Return value / return type blocks.
returnsTemplate = \
    '''<dl>
<dt>Returns:</dt>
<dd>
{{0}}
</dd>
</dl>'''

returnTypesTemplate = \
    '''<dl>
<dt>Return Type:</dt>
<dd>
{{0}}
</dd>
</dl>'''

# Raised exceptions.
exceptionsListTemplate = \
    '''<dl>
{{Exceptions}}
</dl>'''

exceptionsListEntryTemplate = \
    '''<dt>Raises <b>{{Name}}</b>:</dt>
<dd>
{{Description}}
</dd>'''

# Qt signals and events documented on a class.
signalsListTemplate = \
    '''<h4>Signals</h4>
<dl>
{{Signals}}
</dl>'''

signalsListEntryTemplate = \
    '''<dt>{{Name}}</dt>
<dd>
{{Description}}
</dd>'''

eventsListTemplate = \
    '''<h4>Events</h4>
<dl>
{{Events}}
</dl>'''

eventsListEntryTemplate = \
    '''<dt>{{Name}}</dt>
<dd>
{{Description}}
</dd>'''

# Miscellaneous annotation blocks (@deprecated, @author, @see, @since).
deprecatedTemplate = \
    '''<p>
<b>Deprecated.</b>
{{Lines}}
</p>'''

authorInfoTemplate = \
    '''<p>
<i>Author(s)</i>:
{{Authors}}
</p>'''

seeListTemplate = \
    '''<dl>
<dt><b>See Also:</b></dt>
{{Links}}
</dl>'''

seeListEntryTemplate = \
    '''<dd>
{{Link}}
</dd>'''

# NOTE(review): the <a> tag is intentionally left unterminated here --
# {{Link}} presumably supplies the href attribute and closing markup; confirm.
seeLinkTemplate = '''<a style="color:{LinkColor}" {{Link}}'''

sinceInfoTemplate = \
    '''<p>
<b>since</b> {{Info}}
</p>'''

# Index pages: package/module listings.
indexBodyTemplate = '''
<h1 style="background-color:{Level1HeaderBgColor};color:{Level1HeaderColor}">
{{Title}}</h1>
{{Description}}
{{Subpackages}}
{{Modules}}'''

indexListPackagesTemplate = '''
<h3 style="background-color:{Level2HeaderBgColor};color:{Level2HeaderColor}">
Packages</h3>
<table>
{{Entries}}
</table>'''

indexListModulesTemplate = '''
<h3 style="background-color:{Level2HeaderBgColor};color:{Level2HeaderColor}">
Modules</h3>
<table>
{{Entries}}
</table>'''

indexListEntryTemplate = \
    '''<tr>
<td><a style="color:{LinkColor}" href="{{Link}}">{{Name}}</a></td>
<td>{{Description}}</td>
</tr>'''
|
from flask import render_template
from app import app, db, models
import json
@app.route('/')
@app.route('/index')
def index():
    """Render the home page with the 50 most frequent words.

    Groups ``Words`` rows by word, counts occurrences, and passes the
    top-50 words and their counts to the template as parallel lists.
    """
    data = (
        db.session.query(models.Words,
                         db.func.count(models.Words.id).label("total"))
        .group_by(models.Words.word)
        # BUG FIX: order_by("total DESC") relies on textual "<col> <dir>"
        # ordering that newer SQLAlchemy versions reject; use an explicit
        # descending expression instead.
        .order_by(db.desc("total"))
        # Let the database return only the 50 rows we need instead of
        # fetching every group and slicing in Python.
        .limit(50)
        .all()
    )
    words = [row[0].word for row in data]
    count = [row[1] for row in data]
    return render_template('index.html', words=words, count=count)
|
'''
Copyright 2015 Travel Modelling Group, Department of Civil Engineering, University of Toronto
This file is part of the TMG Toolbox.
The TMG Toolbox is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
The TMG Toolbox is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with the TMG Toolbox. If not, see <http://www.gnu.org/licenses/>.
'''
'''
Export Count Station Link Correspondence File
Authors: David King
Latest revision by:
[Description]
'''
'''
0.0.1 Created
0.1.1 Created on 2015-03-13 by David King
'''
import inro.modeller as _m
import csv
import traceback as _traceback
from contextlib import contextmanager
from contextlib import nested
# Module-level Modeller handles, resolved once at import time.
# NOTE(review): `net` is bound to the *primary* scenario's network when the
# module is imported, not when a tool runs -- verify this is intended.
_mm = _m.Modeller()
net =_mm.scenario.get_network()
_util = _mm.module('tmg.common.utilities')
_tmgTPB = _mm.module('tmg.common.TMG_tool_page_builder')
class ExportCountStationLocation(_m.Tool()):
    """Export a CSV file mapping each count station (links with @stn1 > 0)
    to the link (inode-jnode) it is located on."""

    version = '0.1.1'
    tool_run_msg = ""
    number_of_tasks = 1  # steps reported to the ProgressTracker

    # ---Parameters exposed to Modeller
    Scenario = _m.Attribute(_m.InstanceType)
    CordonExportFile = _m.Attribute(str)

    def __init__(self):
        #---Init internal variables
        self.TRACKER = _util.ProgressTracker(self.number_of_tasks) #init the ProgressTracker

        #---Set the defaults of parameters used by Modeller
        self.Scenario = _mm.scenario #Default is primary scenario

    def page(self):
        """Build the Modeller tool page (export-file picker)."""
        pb = _tmgTPB.TmgToolPageBuilder(self, title="Export Count Station-Link Correspondence File v%s" %self.version,
                     description="Exports a link and countpost correspondence file.\
                         Contained witin, is the link on which each countpost is found.\
                         Assumes that count stations are defined by '@stn1'.",
                     branding_text="- TMG Toolbox")

        if self.tool_run_msg != "": # to display messages in the page
            pb.tool_run_status(self.tool_run_msg_status)

        pb.add_header("EXPORT CORDON DATA FILE")

        pb.add_select_file(tool_attribute_name='CordonExportFile',
                           window_type='save_file', file_filter='*.csv',
                           title="Cordon Count File",
                           note="Select Export Location:\
                               <ul><li>countpost_id</li>\
                               <li>link id (inode-jnode)</li>\
                               </ul>")

        return pb.render()

    def __call__(self, Scen, TruthTable):
        """Entry point when invoked programmatically from another tool."""
        self.tool_run_msg = ""
        self.TRACKER.reset()

        self.Scenario = Scen
        self.CordonTruthTable = TruthTable

        try:
            self._Execute()
        except Exception as e:
            self.tool_run_msg = _m.PageBuilder.format_exception(
                e, _traceback.format_exc())
            raise

        self.tool_run_msg = _m.PageBuilder.format_info("Done.")

    def run(self):
        """Entry point when invoked from the Modeller page."""
        self.tool_run_msg = ""
        self.TRACKER.reset()

        try:
            self._Execute()
        except Exception as e:
            self.tool_run_msg = _m.PageBuilder.format_exception(
                e, _traceback.format_exc())
            raise

        self.tool_run_msg = _m.PageBuilder.format_info("Done.")

    def _Execute(self):
        with _m.logbook_trace(name="{classname} v{version}".format(classname=(self.__class__.__name__), version=self.version),
                                     attributes=self._GetAtts()):
            # BUG FIX: read the network from the scenario selected for this
            # run. The module-level `net` was captured from the *primary*
            # scenario at import time and silently ignored self.Scenario
            # (e.g. the one passed to __call__).
            network = self.Scenario.get_network()
            lines =[]
            for link in network.links():
                if int(link['@stn1']) > 0:
                    lines.append((link['@stn1'],link.id))
            with open(self.CordonExportFile, 'w') as writer:
                writer.write("Countpost ID ,Link (i-node j-node)")
                for line in lines:
                    line = [str(c) for c in line]
                    writer.write("\n" + ','.join(line))

    #----SUB FUNCTIONS---------------------------------------------------------------------------------

    def _GetAtts(self):
        # Attributes recorded in the Modeller logbook entry.
        atts = {
                "Scenario" : str(self.Scenario.id),
                "Version": self.version,
                "self": self.__MODELLER_NAMESPACE__}

        return atts

    @_m.method(return_type=_m.TupleType)
    def percent_completed(self):
        # Polled by the Modeller page to display progress.
        return self.TRACKER.getProgress()

    @_m.method(return_type=unicode)
    def tool_run_msg_status(self):
        return self.tool_run_msg
|
"""
Compatibility module.
This module contains duplicated code from Python itself or 3rd party
extensions, which may be included for the following reasons:
* compatibility
* we may only need a small subset of the copied library/module
"""
# Re-export the private compatibility submodules' public names so callers can
# use e.g. ``getargspec`` directly from this package.
# NOTE(review): these are implicit relative imports (Python 2 semantics);
# Python 3 would require ``from . import _inspect`` -- confirm target version.
import _inspect
import py3k
from _inspect import getargspec, formatargspec
from py3k import *

# Public API is exactly the union of the two submodules' __all__ lists.
__all__ = []
__all__.extend(_inspect.__all__)
__all__.extend(py3k.__all__)
|
import sys
import traceback

from tqdm import tqdm

from common.helpers.output import log
from scenario import Scenario, SwitchAttr, Flag
class GolangDepsUpdate(Scenario):
    ''' update dependencies of Golang projects packaged in Fedora '''

    max_depth = SwitchAttr("--max-depth", int, default=None,
                           help="specify commit depth limit")

    since_date = SwitchAttr("--since-date", str, default=None,
                            help="specify since date")

    skip_errors = Flag("--skip-errors",
                       help="errors will be reported, but the computation will not be interrupted")

    def main(self):
        """Walk all golang-github-* packages and store new upstream commits
        with their analysed dependencies."""
        with self.get_system() as system:
            # NOTE(review): "goland" looks like a typo for "golang", but the
            # name must match the remote API -- confirm before renaming.
            golang_pkgs = system.async_call.goland_package_listing()
            stored_projects = system.async_call.deps_project_listing()

            for pkg in golang_pkgs.result:
                if not pkg['name'].startswith('golang-github-'):
                    log.warning("Skipping %s" % pkg['name'])
                    # TODO: remove once support for mercurial and full package->upstream translation will be available
                    continue

                try:
                    # BUG FIX: removed a leftover debugging statement
                    # `raise ValueError("value error")` that made the whole
                    # body below unreachable.
                    print("Inspecting '%s'" % pkg['name'])
                    upstream_url = system.async_call.golang_package2upstream(pkg['name'])

                    if pkg['name'] in stored_projects.result:
                        stored_commits = system.async_call.deps_project_commit_listing(pkg['name'])
                    else:
                        stored_commits = None

                    scm_log = system.async_call.scm_log(upstream_url.result,
                                                        max_depth=self.max_depth,
                                                        since_date=self.since_date)

                    for commit in tqdm(scm_log.result):
                        log.debug("Commit %s project %s" % (commit['hash'], pkg['name']))
                        # Only store commits we have not recorded yet.
                        if not stored_commits or commit not in stored_commits.result:
                            file_id = system.async_call.scm_store(upstream_url.result, commit['hash'])
                            deps = system.async_call.deps_analysis(file_id.result)
                            system.async_call.deps_store_project(pkg['name'], commit['hash'], commit['time'],
                                                                 deps.result, deps.meta)
                except Exception:
                    # BUG FIX: the previous handler called the non-existent
                    # traceback method print_exc() and re-raised the
                    # sys.exc_info() tuple (invalid). Log the formatted
                    # traceback, or re-raise the original exception.
                    if self.skip_errors:
                        log.error(traceback.format_exc())
                    else:
                        raise
if __name__ == '__main__':
    # NOTE(review): exits with failure status instead of running the
    # scenario -- presumably this module is only meant to be launched via
    # the Scenario framework; confirm the exit(1) is intentional.
    sys.exit(1)
|
"""
matstat docstrings
"""
|
from math import sqrt
from bisect import bisect_left
import scipy.stats as st
maxlong = 9223372036854775807
class RunningStat(object):
    '''Gather single-pass statistical data from an iterable.

    Use as a pass-through wrapper: ``for item in stat(iterable): ...``
    updates ``count``, ``min``, ``max`` and the statistical moments while
    yielding the items unchanged.

    Note: the original string-accumulation helpers (__iadd__, __exit__,
    _addstr, append, ...) referenced attributes absent from __slots__ and
    could never run; they have been removed.
    '''
    __slots__ = ('count', 'moments', 'min', 'max')

    def __init__(self, moments=1, buckets=1, sorted=False):
        # BUG FIX: the first parameter was named ``object`` instead of
        # ``self``, so every attribute assignment raised NameError.
        self.count = 0
        # moments[0] = running mean; moments[1] (if requested) accumulates
        # the sum of squared deviations (Welford's M2).
        self.moments = [0] * moments
        #self.buckets = [0] * buckets # count of items in each bucket
        #self.percentiles = [0] * (buckets + 1) # border values between buckets
        self.min = None
        self.max = None

    def __call__(self, iterable, quantifier=float):
        '''Wrap *iterable*, updating the statistics as items flow through.

        *quantifier* maps each item to the number used for the statistics.
        '''
        # BUG FIX: the original consumed a single item with next() and used
        # the invalid ``else if`` keyword; also ``max`` was never updated
        # for the first items and comparing against the initial None raised
        # TypeError on Python 3.
        for item in iterable:
            self.count += 1
            num = quantifier(item)
            if self.min is None or num < self.min:
                self.min = num
            if self.max is None or num > self.max:
                self.max = num
            #index = bisect_left(self.percentiles, num)
            #self.bucket[index] += 1
            self.add_to_moments(num)
            yield item

    def add_to_moments(self, num):
        '''Welford single-pass update of the mean and squared deviations.'''
        oldmean = self.moments[0]
        try:
            newmean = oldmean + (num - oldmean) / self.count
        except ZeroDivisionError:
            newmean = num
        self.moments[0] = newmean
        if len(self.moments) > 1:
            # BUG FIX: ``vk`` was undefined and the multiplication operator
            # was missing in ``(num - oldmean)(num - newmean)``.
            self.moments[1] += (num - oldmean) * (num - newmean)

    def __len__(self):
        return self.count

    @property
    def mean(self):
        return self.moments[0]

    @property
    def variance(self):
        '''Population variance of the values seen so far (NaN if empty or
        second moment not requested).'''
        if self.count == 0 or len(self.moments) < 2:
            return float('nan')
        return self.moments[1] / self.count

    @property
    def kurtosis(self):
        # NOTE(review): higher moments are reserved but never accumulated;
        # this returns the raw third slot, not a real kurtosis.
        return self.moments[2]
class Gen(object):
    """Re-iterable wrapper: every __iter__() call yields a fresh counting
    Iter over the wrapped iterable."""

    __slots__ = ('inner')

    def __init__(self, inner):
        self.inner = inner

    def __iter__(self):
        return Iter(self, self.inner)

    def __len__(self):
        return len(self.inner)
class Iter(object):
__slots__ = ('generator', 'count', 'inner')
def __new__(cls, gen, iterable, action=None):
if isinstance(iterable, cls):
return iterable
return super().__new__(cls, gen, iterable)
def __init__(self, gen, iterable, action=None):
self.generator = gen
self.count = 0
self.actions = [] if action is None else [action]
self.inner = iterable \
if hasattr(iterable, '__next__') \
else iterable.__iter__()
def __iter__(self): return self
def __next__(self):
r = self.inner.__next__()
for a in self.actions: r = a(r)
self.count += 1
return r
def __len__(self): return self.generator.__len__() - self.count
# z for a two-sided 95% confidence interval (~1.96) and its square.
z_score = st.norm.ppf((1+.95)/2)
z_sqr = z_score*z_score

def wilson_score(positive, n):
    '''returns lower bound of Wilson score confidence interval for a Bernoulli
    parameter

    resource: http://www.evanmiller.org/how-not-to-sort-by-average-rating.html'''
    assert positive <= n
    # BUG FIX: ``n is 0`` tested identity instead of equality; the original
    # also used the syntactically invalid identifier ``zz÷n`` and the
    # undefined name ``z`` (should be z_score).
    if n == 0:
        return float('NaN')
    p = positive / n
    zz_n = z_sqr / n
    return (p + zz_n / 2 - z_score * sqrt((p * (1 - p) + zz_n / 4) / n)) \
        / (1 + zz_n)
def stats(gen, moments=2, readers=[]):
def generator():
|
import sys
import optparse
from ovirtsdk.xml import params
description = """
RHEV-nagios-table-host-mem output is a script for querying RHEVM via API to get host status
It's goal is to output a table of host/vm status for simple monitoring via external utilities
"""
p = optparse.OptionParser("rhev-nagios-table-host-mem.py [arguments]", description=description)
p.add_option('-v', "--verbosity", dest="verbosity", help="Show messages while running", metavar='[0-n]', default=0,
type='int')
p.add_option("--host", dest="host", help="Show messages while running", metavar='host')
p.add_option("-t", "--table", dest="table", help="Input file in CSV format", metavar='table')
(options, args) = p.parse_args()
# Validate required arguments.
if not options.host:
    print("Host not defined, exiting")
    sys.exit(1)

if not options.table:
    print("CSV table not defined, exiting")
    sys.exit(1)

try:
    # BUG FIX: ``file`` is the Python 2-only builtin; ``open`` works on
    # both Python 2 and 3. (Comment translated: "file to process".)
    f = open(options.table)
except IOError:
    print("Problem opening the file %s" % options.table)
    sys.exit(1)

# Nagios-style exit codes: 0 = OK, 1 = WARNING, 2 = CRITICAL.
for line in f:
    fields = line.split(";")
    if fields[0] == "host":
        if fields[1] == options.host:
            usage = int(fields[4])
            # BUG FIX: the original chained two independent ifs with a
            # trailing else, so any usage below 95 reset retorno to 0 and
            # the >= 90 warning level was always lost.
            if usage >= 95:
                retorno = 2
            elif usage >= 90:
                retorno = 1
            else:
                retorno = 0
            print(usage)
            sys.exit(retorno)
|
def get_perm_argparser(self, args):
    """Look up and announce the permission level for a nick, command or message."""
    tokens = args.split(" ")
    kind = tokens[0]
    if kind == "nick":
        self.conman.gen_send("Permission level for %s: %s" % (tokens[1], self.permsman.get_nick_perms(tokens[1])))
    elif kind == "cmd":
        name = tokens[1]
        if name.startswith("."):
            # Accept the dotted command form (".cmd") as well.
            name = name[1:]
        self.conman.gen_send("Permission level for %s: %s" % (name, self.permsman.get_cmd_perms(name)))
    elif kind == "msg":
        self.conman.gen_send("Message permissions for %s: %s" % (tokens[1], self.permsman.get_msg_perms(tokens[1])))
def set_perm_argparser(self, args):
    """Parse and apply a permission change for a nick, command or message."""
    tokens = args.split(" ")
    kind = tokens[0]
    if kind == "nick":
        self.conman.gen_send("Setting permission level for %s: %s" % (tokens[1], tokens[2]))
        self.permsman.set_nick_perms(tokens[1], tokens[2])
    elif kind == "cmd":
        name = tokens[1]
        if name.startswith("."):
            # Accept the dotted command form (".cmd") as well.
            name = name[1:]
        self.conman.gen_send("Setting permission level for %s: %s" % (name, tokens[2]))
        self.permsman.set_cmd_perms(name, tokens[2])
    elif kind == "msg":
        # "true" (any case) or "1" enables; anything else disables.
        flag = tokens[2].lower() == "true" or tokens[2] == "1"
        self.conman.gen_send("Setting message permissions for %s: %s" % (tokens[1], flag))
        self.permsman.set_msg_perms(tokens[1], flag)
self._map("command", "getperm", get_perm_argparser)
self._map("command", "setperm", set_perm_argparser)
|
from .base import BaseInterface
import eventlet
from watchdog.observers import Observer
from watchdog.events import PatternMatchingEventHandler
from flask import Flask, render_template, session, request, send_from_directory
from flask_socketio import SocketIO, emit, join_room, leave_room, close_room, rooms, disconnect
from werkzeug.utils import secure_filename
import threading, os, time, queue
import logging, sys, json
from ..engine.network import get_allip, get_hostname
import socket
from zeroconf import ServiceInfo, Zeroconf
# Background broadcaster thread, started once on the first client 'init'.
thread = None
thread_lock = threading.Lock()

# Candidate install locations of the RPi-Regie web UI, probed in order.
REGIE_PATH1 = '/opt/RPi-Regie'
REGIE_PATH2 = '/data/RPi-Regie'
class RegieInterface (BaseInterface):
    """Web 'Regie' (control-room) interface: serves the RPi-Regie UI over
    HTTP/SocketIO and translates its grid project into peer orders."""

    def __init__(self, hplayer, port, datapath):
        super(RegieInterface, self).__init__(hplayer, "Regie")
        self._port = port            # HTTP/SocketIO listen port
        self._datapath = datapath    # directory containing project.json
        self._server = None          # ThreadedHTTPServer while listening

    # HTTP receiver THREAD
    def listen(self):
        """Advertise over ZeroConf and run the web server until stopped."""
        # Advertize on ZeroConf
        zeroconf = Zeroconf()
        info = ServiceInfo(
            "_http._tcp.local.",
            "Regie._"+get_hostname()+"._http._tcp.local.",
            addresses=[socket.inet_aton(ip) for ip in get_allip()],
            port=self._port,
            properties={},
            server=get_hostname()+".local.",
        )
        zeroconf.register_service(info)

        # Start server
        self.log( "regie interface on port", self._port)
        with ThreadedHTTPServer(self, self._port) as server:
            self._server = server
            # Block until BaseInterface signals shutdown.
            self.stopped.wait()
        self._server = None

        # Unregister ZeroConf
        zeroconf.unregister_service(info)
        zeroconf.close()

    def projectPath(self):
        # Location of the regie project file inside the data directory.
        return os.path.join(self._datapath, 'project.json')

    def projectRaw(self):
        # Raw JSON text of the project; falls back to an empty skeleton
        # when no project.json exists yet.
        project = '{"pool":[], "project":[[]]}'
        if os.path.isfile(self.projectPath()):
            with open( self.projectPath(), 'r') as file:
                project = file.read()
        return project

    # parse locally for programatic execution
    def reload(self):
        """Re-parse project.json; returns the dict, or None on parse error."""
        try:
            self._project = json.loads(self.projectRaw())
        except:
            # Malformed JSON: keep running with no project loaded.
            self._project = None
            self.log("Error while parsing project..")
        # print(self._project)
        return self._project

    # play sequence
    def playseq(self, sceneIndex, seqIndex):
        """Trigger row *seqIndex* of scene *sceneIndex*: build one order per
        media box in that row and broadcast them to the peers."""
        self.log("PLAYSEQ")
        try:
            # self.log('PLAYSEQ', seqIndex, sceneIndex, boxes)
            orderz = []
            # Boxes whose row (y) matches the requested sequence.
            boxes = [b for b in self._project["project"][0][sceneIndex]["allMedias"] if b["y"] == seqIndex]
            for b in boxes:
                # Column (x) indexes the peer pool.
                peerName = self._project["pool"][ b["x"] ]["name"]

                # MEDIA: translate the cell content into a peer event.
                order = { 'peer': peerName, 'synchro': True}
                if b["media"] in ['stop', 'pause', 'unfade'] :
                    order["event"] = b["media"]
                elif b["media"] == '...':
                    order["event"] = 'continue'
                elif b["media"].startswith('fade'):
                    order["event"] = 'fade'
                    order["data"] = b["media"].split('fade ')[1]
                else:
                    # Play a media file relative to the scene folder.
                    order["event"] = 'playthen'
                    order["data"] = [ self._project["project"][0][sceneIndex]["name"] + '/' + b["media"] ]

                # ON MEDIA END: chain to the next / previous / same sequence.
                if 'onend' in b:
                    if b['onend'] == 'next':
                        order["data"].append( {'event': 'do-playseq', 'data': [sceneIndex, seqIndex+1] } )
                    elif b['onend'] == 'prev':
                        order["data"].append( {'event': 'do-playseq', 'data': [sceneIndex, seqIndex-1] } )
                    elif b['onend'] == 'replay':
                        order["data"].append( {'event': 'do-playseq', 'data': [sceneIndex, seqIndex] } )

                orderz.append(order)

                # LOOP mode for this peer.
                if b["loop"] == 'loop':
                    orderz.append( { 'peer': peerName, 'event': 'loop', 'data': 1} )
                elif b["loop"] == 'unloop':
                    orderz.append( { 'peer': peerName, 'event': 'unloop'} )

                # LIGHT: forward led orders to the peer's ESP companion.
                if b["light"] and b["light"] != '...':
                    order = { 'peer': peerName, 'synchro': True, 'event': 'esp'}
                    if b["light"].startswith('light'):
                        order["data"] = {
                            'topic': 'leds/all',
                            'data': b["light"].split('light ')[1]
                        }
                    elif b["light"].startswith('preset'):
                        order["data"] = {
                            'topic': 'leds/mem',
                            'data': b["light"].split('preset ')[1]
                        }
                    elif b["light"].startswith('off'):
                        order["data"] = {
                            'topic': 'leds/stop',
                            'data': ''
                        }
                    orderz.append(order)

            self.emit('playingseq', sceneIndex, seqIndex)
            # NOTE(review): the meaning of the magic argument 437 is not
            # visible here -- TODO confirm against BaseInterface.emit.
            self.emit('peers.triggers', orderz, 437)
        except:
            # Best-effort by design: malformed project data only logs.
            self.log('Error playing Scene', sceneIndex, 'Seq', seqIndex)
class ThreadedHTTPServer(object):
    """Flask + SocketIO server running in a daemon thread, plus a watchdog
    on project.json; used as a context manager by RegieInterface.listen()."""

    def __init__(self, regieinterface, port):
        self.regieinterface = regieinterface

        interface_path = os.path.dirname(os.path.realpath(__file__))

        # Prefer an installed RPi-Regie web UI; fall back to the bundled one.
        if os.path.isdir(REGIE_PATH1):
            www_path = os.path.join(REGIE_PATH1, 'web')
        elif os.path.isdir(REGIE_PATH2):
            www_path = os.path.join(REGIE_PATH2, 'web')
        else:
            www_path = os.path.join(interface_path, 'regie')

        app = Flask(__name__, template_folder=www_path)
        # NOTE(review): hard-coded session secret; fine on a closed LAN,
        # confirm it is not exposed publicly.
        app.config['SECRET_KEY'] = 'secret!'
        socketio = SocketIO(app, cors_allowed_origins="*")

        #
        # FLASK Routing Static
        #
        @app.route('/')
        def index():
            # self.regieinterface.log('requesting index')
            return send_from_directory(www_path, 'index.html')

        @app.route('/<path:path>')
        def send_static(path):
            # self.regieinterface.log('requesting '+path)
            return send_from_directory(www_path, path)

        #
        # FLASK Routing API
        #
        # @app.route('/<path:path>')
        # def send_static(path):
        #     # self.regieinterface.log('requesting '+path)
        #     return send_from_directory(www_path, path)

        #
        # SOCKETIO Routing
        #

        # Outgoing events are queued here and drained by background_thread().
        self.sendBuffer = queue.Queue()

        def background_thread():
            # Drain the send buffer and forward each task as a SocketIO emit;
            # sleep briefly when idle to avoid busy-waiting.
            while True:
                try:
                    task = self.sendBuffer.get_nowait()
                    if len(task) > 1: socketio.emit(task[0], task[1])
                    else: socketio.emit(task[0], None)
                    self.sendBuffer.task_done()
                except queue.Empty:
                    socketio.sleep(0.1)

        @self.regieinterface.hplayer.on('files.dirlist-updated')
        def filetree_send(ev, *args):
            # Push the refreshed media file tree to all clients.
            self.sendBuffer.put( ('data', {'fileTree': self.regieinterface.hplayer.files()}) )

        @self.regieinterface.hplayer.on('files.activedir-updated')
        def activedir_send(ev, *args):
            # Push the newly active scene directory.
            self.sendBuffer.put( ('data', {'scene': args[1]}) )

        @self.regieinterface.hplayer.on('*.peer.*')
        def peer_send(ev, *args):
            # Relay peer status events; 'playingseq' becomes a sequence
            # update, everything else is forwarded as a typed peer event.
            event = ev.split('.')[-1]
            if event == 'playingseq':
                print(ev, args[0]['data'][1])
                self.sendBuffer.put( ('data', {'sequence': args[0]['data'][1]}) )
            else:
                args[0].update({'type': event})
                self.sendBuffer.put( ('peer', args[0]) )

        # !!! TODO: stop zyre monitoring when every client are disconnected

        @socketio.on('connect')
        def client_connect():
            self.regieinterface.log('New Remote Regie connected')

        @socketio.on('save')
        def save(data):
            # Validate JSON before persisting to project.json.
            try:
                json.loads(data)
                with open( os.path.join(self.regieinterface._datapath, 'project.json'), 'w') as file:
                    file.write(data)
            except:
                e = str(sys.exc_info()[0])
                self.regieinterface.log('fail to save project: '+e+' '+data)

        @socketio.on('init')
        def init(data):
            # send project
            emit('data', self.projectData())

            # Start update broadcaster (once, guarded by the module lock).
            global thread
            with thread_lock:
                if thread is None:
                    thread = socketio.start_background_task(target=background_thread)

        @socketio.on('register')
        def register(data):
            # enable peer monitoring
            self.regieinterface.emit('peers.getlink')
            self.regieinterface.emit('peers.subscribe', ['status', 'settings', 'playingseq'])

        @socketio.on('event')
        def event(data):
            # NOTE(review): 437 mirrors RegieInterface.playseq -- TODO
            # confirm its meaning against BaseInterface.emit.
            self.regieinterface.emit('peers.triggers', data, 437)

        # prepare sub-thread
        self.server_thread = threading.Thread(target=lambda:socketio.run(app, host='0.0.0.0', port=port))
        self.server_thread.daemon = True

        # watchdog project.json
        self.watcher()

        # internal load project
        self.regieinterface.reload()

    def start(self):
        # Launch the SocketIO server in its daemon thread.
        self.server_thread.start()

    def stop(self):
        #self.server.stop()
        pass

    def __enter__(self):
        self.start()
        return self

    def __exit__(self, type, value, traceback):
        self.stop()

    def projectData(self):
        # Payload sent to clients on 'init' and whenever project.json changes.
        data={
            'fullproject': self.regieinterface.projectRaw(),
            'fileTree': self.regieinterface.hplayer.files()
        }
        return data

    def watcher(self):
        """Watch project.json and push the re-parsed project on any change."""
        def onchange(e):
            self.regieinterface.log('project updated ! pushing it...')
            self.regieinterface.reload()
            self.sendBuffer.put( ('data', self.projectData()) )

        handler = PatternMatchingEventHandler("*/project.json", None, False, True)
        handler.on_any_event = onchange
        self.projectObserver = Observer()
        self.projectObserver.schedule(handler, os.path.dirname(self.regieinterface.projectPath()))
        try:
            self.projectObserver.start()
        except:
            # Data directory missing: watching is optional, just log.
            self.regieinterface.log('project.json not found')
|
import re
from pyquery import PyQuery
from novel import serial, utils
# sto.cc uses /{tid}-{page}/ URLs; BASE_URL is page 1 of a novel.
BASE_URL = 'http://www.sto.cc/{}-1/'
PAGE_URL = 'http://www.sto.cc/{}-{}/'
class StoTool(utils.Tool):
    """Text cleaner for sto.cc pages: builds regexes that strip the site's
    watermark sentences even when decoration symbols are interleaved
    between their characters."""

    def __init__(self):
        super().__init__()
        # Watermark sentences, with 's' marking the gaps where arbitrary
        # filler characters may appear.
        word_list = (
            's思s兔s網s文s檔s下s載s與s在s線s閱s讀s',
            's本s作s品s由s思s兔s網s提s供s下s載s與s在s線s閱s讀s',
            's本s作s品s由s思s兔s在s線s閱s讀s網s友s整s理s上s傳s',
            's思s兔s在s線s閱s讀s',
            's思s兔s文s檔s共s享s與s在s線s閱s讀s',
        )
        symbol_list = (
            '\^_\^', ':-\)', '\^o\^', '-_-!',
            '││', '//', '\$\$',
        )
        # Any single character, or one of the decoration symbols.
        filler = '(.|' + '|'.join(symbol_list) + ')'
        for word in word_list:
            self.remove_extras.append(re.compile(filler.join(word.split('s'))))
        for extra in ('',):
            self.remove_extras.append(re.compile(extra))
class Sto(serial.SerialNovel):
    """sto.cc serial novel: title/author come from the keywords meta tag,
    with one chapter per paginated page."""

    def __init__(self, tid):
        super().__init__(utils.base_to_url(BASE_URL, tid), '#BookContent',
                         tid=tid)
        # NOTE(review): assigns the StoTool *class*, not an instance (no
        # parentheses) -- confirm whether the framework instantiates it.
        self.tool = StoTool

    def get_title_and_author(self):
        # The keywords meta content is "title,author,...".
        st = self.doc('meta').filter(
            lambda i, e: PyQuery(e).attr('name') == 'keywords'
        ).attr('content')
        return re.match(r'(.*?),(.*?),.*', st).groups()

    @property
    def chapter_list(self):
        """List of (index, page_url, page_title) parsed from the pager JS."""
        st = re.search(
            r'ANP_goToPage\("Page_select",(\d+),(\d+),1\);', self.doc.html())
        # group(1) is the tid embedded in the pager, group(2) the page count.
        # NOTE(review): compares a regex string group against self.tid --
        # if tid is an int this never matches; confirm tid's type.
        if st.group(1) == self.tid:
            page_num = int(st.group(2))
        else:
            raise Exception('Something strange may happened.')
        return [(i + 1, PAGE_URL.format(self.tid, i + 1), '第{:d}頁'.format(i + 1))
                for i in range(page_num)]

    def get_intro(self):
        # Novel synopsis from the description meta tag.
        intro = self.doc('meta').filter(
            lambda i, e: PyQuery(e).attr('name') == 'description'
        ).attr('content')
        return intro
|
"""
Convert wiggle data to a binned array. This assumes the input data is on a
single chromosome and does no sanity checks!
usage: %prog score_file out_file < wiggle_data
-c, --comp=type: compression type (none, zlib, lzo)
"""
from __future__ import division
import sys
import psyco_full
import bx.wiggle
from bx.binned_array import BinnedArray
from bx_extras.fpconst import isNaN
from bx.cookbook import doc_optparse
from bx import misc
def main():
    """Read wiggle data, store per-position scores in a BinnedArray, and
    write it to the output file (Python 2 script; assumes one chromosome)."""
    # Parse command line
    options, args = doc_optparse.parse( __doc__ )
    try:
        if options.comp:
            comp_type = options.comp
        else:
            comp_type = None
        score_fname = args[0]
        out_fname = args[1]
    except:
        # Missing/invalid arguments: print usage and exit.
        doc_optparse.exit()

    scores = BinnedArray()

    ## last_chrom = None
    for i, ( chrom, pos, val ) in enumerate( bx.wiggle.Reader( misc.open_compressed( score_fname ) ) ):
        #if last_chrom is None:
        #    last_chrom = chrom
        #else:
        #    assert chrom == last_chrom, "This script expects a 'wiggle' input on only one chromosome"
        scores[pos] = val
        # Status
        if i % 10000 == 0: print i, "scores processed"

    out = open( out_fname, "w" )
    if comp_type:
        scores.to_file( out, comp_type=comp_type )
    else:
        scores.to_file( out )
    out.close()

if __name__ == "__main__": main()
|
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated migration: adds AlgorithmTagProposal and
    # DifficultyProposal -- user-submitted suggestions attached to a problem.
    # The user FK is nullable with SET_NULL so proposals survive account
    # deletion.

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('problems', '0018_origintag_helptexts'),
    ]

    operations = [
        migrations.CreateModel(
            name='AlgorithmTagProposal',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('problem', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='problems.Problem')),
                ('tag', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='problems.AlgorithmTag')),
                ('user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': 'algorithm tag proposal',
                'verbose_name_plural': 'algorithm tag proposals',
            },
        ),
        migrations.CreateModel(
            name='DifficultyProposal',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('difficulty', models.CharField(max_length=10)),
                ('problem', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='problems.Problem')),
                ('user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': 'difficulty proposal',
                'verbose_name_plural': 'difficulty proposals',
            },
        ),
    ]
|
from __future__ import unicode_literals
import DjangoUeditor.models
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add BookReview, Essay and FilmReview models and tweak Article.

    The three new models share the same shape: title, tag, auto-set
    publish/update timestamps, and a rich-text UEditor content field.
    The verbose names are unicode-escaped Chinese labels and must be
    kept byte-identical.
    """

    dependencies = [
        ('blogadmin', '0006_auto_20170827_1142'),
    ]

    operations = [
        migrations.CreateModel(
            name='BookReview',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(blank=True, max_length=128, null=True, verbose_name='\u6807\u9898')),
                ('tag', models.CharField(blank=True, max_length=32, null=True, verbose_name='\u6807\u7b7e')),
                ('pub_time', models.DateTimeField(auto_now_add=True, verbose_name='\u53d1\u5e03\u65f6\u95f4')),
                ('update_time', models.DateTimeField(auto_now=True, null=True, verbose_name='\u66f4\u65b0\u65f6\u95f4')),
                ('content', DjangoUeditor.models.UEditorField(blank=True, default='', verbose_name='\u6b63\u6587')),
            ],
            options={
                # Newest-updated entries first.
                'ordering': ['-update_time'],
                'verbose_name': '\u4e66\u520a\u8bc4\u8bba',
                'verbose_name_plural': '\u4e66\u520a\u8bc4\u8bba',
            },
        ),
        migrations.CreateModel(
            name='Essay',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(blank=True, max_length=128, null=True, verbose_name='\u6807\u9898')),
                ('tag', models.CharField(blank=True, max_length=32, null=True, verbose_name='\u6807\u7b7e')),
                ('pub_time', models.DateTimeField(auto_now_add=True, verbose_name='\u53d1\u5e03\u65f6\u95f4')),
                ('update_time', models.DateTimeField(auto_now=True, null=True, verbose_name='\u66f4\u65b0\u65f6\u95f4')),
                ('content', DjangoUeditor.models.UEditorField(blank=True, default='', verbose_name='\u6b63\u6587')),
            ],
            options={
                'ordering': ['-update_time'],
                'verbose_name': '\u6742\u6587',
                'verbose_name_plural': '\u6742\u6587',
            },
        ),
        migrations.CreateModel(
            name='FilmReview',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(blank=True, max_length=128, null=True, verbose_name='\u6807\u9898')),
                ('tag', models.CharField(blank=True, max_length=32, null=True, verbose_name='\u6807\u7b7e')),
                ('pub_time', models.DateTimeField(auto_now_add=True, verbose_name='\u53d1\u5e03\u65f6\u95f4')),
                ('update_time', models.DateTimeField(auto_now=True, null=True, verbose_name='\u66f4\u65b0\u65f6\u95f4')),
                ('content', DjangoUeditor.models.UEditorField(blank=True, default='', verbose_name='\u6b63\u6587')),
            ],
            options={
                'ordering': ['-update_time'],
                'verbose_name': '\u5f71\u89c6\u8bc4\u8bba',
                'verbose_name_plural': '\u5f71\u89c6\u8bc4\u8bba',
            },
        ),
        migrations.AlterModelOptions(
            name='article',
            options={'ordering': ['-update_time'], 'verbose_name': '\u6280\u672f\u7c7b\u535a\u5ba2', 'verbose_name_plural': '\u6280\u672f\u7c7b\u535a\u5ba2'},
        ),
        migrations.AlterField(
            model_name='article',
            name='category',
            field=models.CharField(choices=[('web', 'Web\u5f00\u53d1'), ('linux', '\u7cfb\u7edf\u8fd0\u7ef4'), ('algorithm', '\u7b97\u6cd5'), ('language', '\u7f16\u7a0b\u8bed\u8a00'), ('others', '\u5176\u4ed6')], default='web', max_length=64, verbose_name='\u7c7b\u522b'),
        ),
    ]
|
from twisted.trial import unittest
from rtpmidi.engines.midi.recovery_journal_chapters import *
class TestNote(unittest.TestCase):
    """Unit tests for Note: encoding/decoding of note-on logs and the
    note-off bitfield used by the RTP-MIDI recovery journal.

    NOTE(review): this module relies on Python 2 semantics — ``/`` below is
    integer division and the encoders return byte strings of type ``str``.
    """

    def setUp(self):
        self.note = Note()

    def test_note_on(self):
        """A note-on log must encode to exactly 2 octets."""
        #simple form: note number + velocity only
        note_to_test = self.note.note_on(100, 90)
        #Testing type
        assert(type(note_to_test)==str), self.fail("Wrong type return")
        #length test
        assert(len(note_to_test)==2), \
            self.fail("len of note On is higher than 2 octet")
        #with all args (adds the Y and S markers)
        note_to_test = self.note.note_on(100, 90, 0, 1)
        #length test
        assert(len(note_to_test)==2), \
            self.fail("len of note On is higher than 2 octet")

    def test_parse_note_on(self):
        """Round-trip: parse_note_on must recover S, number, Y and velocity."""
        #Simple
        note_to_test = self.note.note_on(100, 90)
        res_n = self.note.parse_note_on(note_to_test)
        #Testing content
        assert(res_n[1] == 100), self.fail("Note number is not respected")
        assert(res_n[3] == 90), self.fail("Note velocity is not respected")
        #With all args
        note_to_test = self.note.note_on(100, 90, 0, 1)
        res_n = self.note.parse_note_on(note_to_test)
        #Testing content: (S, number, Y, velocity)
        assert(res_n[0] == 1), self.fail("S mark is not respected")
        assert(res_n[1] == 100), self.fail("Note number is not respected")
        assert(res_n[2] == 0), self.fail("Y mark not respected")
        assert(res_n[3] == 90), self.fail("Note velocity is not respected")

    def test_note_off(self):
        """The note-off bitfield must span (high - low + 1) octets."""
        #list of notes to test (note from the same midi channel)
        plist = [[[128, 57, 100],1000], [[144, 4, 0],1000], \
                 [[144, 110, 0],1000], [[144, 112, 0],1000]]
        #setting low and high like in create_chapter_n
        # Python 2 integer division: 113 / 8 == 14, 4 / 8 == 0.
        high = 113 / 8
        low = 4 / 8
        #selecting note off like in create_chapter_n:
        # status 0x80 (note off) or note-on with velocity 0
        note_off_list = [ plist[i][0][1] for i in range(len(plist))\
                          if (plist[i][0][0]&240 == 128) or \
                          (plist[i][0][2] == 0) ]
        res = self.note.note_off(note_off_list, low, high)
        #type test
        assert(type(res)==str), self.fail("Wrong type return")
        #checking size: one octet per 8-note group in [low, high]
        size_wait = high - low + 1
        assert(len(res) == size_wait), \
            self.fail("Problem of size with note off creation")

    def test_parse_note_off(self):
        """Test parse note off"""
        #list of notes to test
        #plist = [[[128, 120, 100],1000],[[145, 4, 0],1000],\
        #         [[145, 110, 0],1000], [[145, 112, 0],1000]]
        #setting low and high like in create_chapter_n
        note_off_test = [12, 57, 112, 114 ]
        # Python 2 integer division again (115 / 8 == 14, 12 / 8 == 1).
        high = 115 / 8
        low = 12 / 8
        res = self.note.note_off(note_off_test, low, high)
        #testing the result of parsing
        res_parsed = self.note.parse_note_off(res, low, high)
        #Testing type
        assert(type(res_parsed)==list), self.fail("Wrong type returned")
        #res_parsed.sort()
        #Testing content: parsed entries hold the note number at index 1
        note_off_test = [12, 57, 112, 114 ]
        for i in range(len(note_off_test)):
            assert(res_parsed[i][1]==note_off_test[i]), \
                self.fail("Problem getting the good value for note off encoded")
class TestChapterP(unittest.TestCase):
    """Unit tests for ChapterP: the program-change chapter of the recovery
    journal, with and without bank-select (MSB/LSB) control changes."""

    def setUp(self):
        self.chapter_p = ChapterP()
        #program change with msb and lsb
        # 176 = control change (controller 0 = bank MSB, 32 = bank LSB),
        # 192 = program change; second element is the packet seqno.
        self.plist = [[[176, 0, 75], 1000], [[176, 32, 110], 1000], \
                      [[192, 110, 0], 1000]]
        #program change without msb and lsb
        self.plist_1 = [[[192, 110, 0], 1000]]

    def test_update(self):
        """Testing chapter P creation from a list (with MSB and LSB)"""
        self.chapter_p.update(self.plist)
        chapter = self.chapter_p.content
        #Testing len: chapter P is a fixed 3 octets
        assert(len(chapter)==3), \
            self.fail("Size of chapter p is not 24 bits!!!")
        #Testing type
        assert(type(chapter)==str), self.fail("Problem of type")
        #Parse back to inspect the encoded fields
        size, chapter_parse, marker_s, marker_x, marker_b \
            = self.chapter_p.parse(chapter)
        #Testing content
        assert(marker_s==1), \
            self.fail("Problem getting right value of S")
        assert(chapter_parse[0][1]==110), \
            self.fail("Problem getting right value of PROGRAM")
        assert(marker_b==1), \
            self.fail("Problem getting right value of B")
        assert(chapter_parse[1][2]==75), \
            self.fail("Problem getting right value of MSB")
        assert(marker_x==0), \
            self.fail("Problem getting right value of X")
        assert(chapter_parse[2][2]==110), \
            self.fail("Problem getting right value of LSB")

    def test_update_1(self):
        """Testing chapter P creation from a list (without MSB and LSB)"""
        self.chapter_p.update(self.plist_1)
        chapter = self.chapter_p.content
        #Testing len
        assert(len(chapter)==3), \
            self.fail("Size of chapter p is not 24 bits!!!")
        #Testing type
        assert(type(chapter)==str), self.fail("Problem of type")
        #Testing content
        size, chapter_parse, marker_s, marker_x, marker_b \
            = self.chapter_p.parse(chapter)
        #Without bank select the B marker must be clear.
        assert(marker_s==1), \
            self.fail("Problem getting right value of S")
        assert(chapter_parse[0][1]==110), \
            self.fail("Problem getting right value of PROGRAM")
        assert(marker_b==0), \
            self.fail("Problem getting right value of B")
        assert(marker_x==0), \
            self.fail("Problem getting right value of X")
class TestChapterC(unittest.TestCase):
    """Unit tests for ChapterC: the control-change chapter (header encoding,
    per-controller logs, and update/trim/highest bookkeeping)."""

    def setUp(self):
        self.chapter_c = ChapterC()
        # 127 control-change commands (status 176) from packet seqno 6.
        self.plist = []
        for i in range(127):
            self.plist.append([[176, i, 100],6])

    def test_header(self):
        """Test header creation ChapterC"""
        #Creating header: (length, S marker)
        header = self.chapter_c.header(10, 1)
        #Testing type
        assert(type(header)==str), self.fail("Wrong type returned")
        #Testing length: header is a single octet
        assert(len(header)==1), self.fail("Wrong header size")

    def test_parse_header(self):
        """Test header parsing ChapterC"""
        #Creating header
        header = self.chapter_c.header(10, 1)
        #Parsing header
        header_parsed = self.chapter_c.parse_header(header)
        #Testing type
        assert(type(header_parsed)==tuple), self.fail("Wrong size returned")
        #Testing content: (S marker, length)
        assert(header_parsed[0]==1), self.fail("Wrong marker_s value")
        assert(header_parsed[1]==10), self.fail("Wrong length value")

    def test_create_log_c(self):
        """Test create log C (individual component from ChapterC"""
        res = self.chapter_c.create_log_c(0, 110, 1, 90)
        assert(type(res)==str), self.fail("Wrong type returned")
        assert(len(res)==2), self.fail("Wrong size returned")

    def test_parse_log_c(self):
        """Test parsing individual component from chapterC"""
        res = self.chapter_c.create_log_c(0, 110, 1, 90)
        res_parsed = self.chapter_c.parse_log_c(res)
        #Fields: (S marker, controller number, A marker, value)
        assert(res_parsed[0]==0), self.fail("Wrong value for marker_s")
        assert(res_parsed[1]==110), self.fail("Wrong value for number")
        assert(res_parsed[2]==1), self.fail("Wrong value for marker_a")
        assert(res_parsed[3]==90), self.fail("Wrong value for value")

    def test_update(self):
        """Testing chapter C creation"""
        self.chapter_c.update(self.plist)
        assert(type(self.chapter_c.content)==str), self.fail("Wrong type returned")
        #length calc header == 1 + 2 * length
        length_wait = 1 + 2 * len(self.plist)
        assert(len(self.chapter_c.content)==length_wait), self.fail("Wrong length returned")

    def test_update_1(self):
        # A duplicate controller number must replace the previous entry,
        # so the encoded length stays at 127 logs.
        self.plist.append([[176, 42, 100],6])
        self.chapter_c.update(self.plist)
        length_wait = 1 + 2 * 127
        assert(len(self.chapter_c.content)==length_wait), self.fail("Wrong length returned")

    def test_parse(self):
        """Test chapter C parsing"""
        self.chapter_c.update(self.plist)
        size, parsed_res, marker_s = self.chapter_c.parse(self.chapter_c.content)
        assert(len(parsed_res)==len(self.plist)), \
            self.fail("Wrong number of command returned")
        #Each parsed command must round-trip (cmd, controller, value).
        for i in range(len(self.plist)):
            assert(parsed_res[i][0]==self.plist[i][0][0]), \
                self.fail("Wrong value returned for cmd")
            assert(parsed_res[i][1]==self.plist[i][0][1]), \
                self.fail("Wrong value returned for pitch")
            assert(parsed_res[i][2]==self.plist[i][0][2]), \
                self.fail("Wrong value returned for velocity")

    def test_trim(self):
        # trim(7) must drop every entry recorded for packets <= 7.
        plist = []
        plist.append([[176, 42, 100],6])
        plist.append([[176, 43, 100],7])
        plist.append([[176, 44, 100],8])
        self.chapter_c.update(plist)
        self.chapter_c.trim(7)
        assert(len(self.chapter_c.controllers)==1), self.fail("Problem erasing controllers on trim")

    def test_update_highest(self):
        plist = []
        plist.append([[176, 42, 100],6])
        plist.append([[176, 43, 100],7])
        plist.append([[176, 44, 100],8])
        self.chapter_c.update(plist)
        #highest tracks the newest packet seqno seen.
        assert(self.chapter_c.highest==8), \
            self.fail("Problem with highest on update")
        self.chapter_c.trim(7)
        assert(self.chapter_c.highest==8), \
            self.fail("Problem with highest on trim(1)")
        #Trimming everything resets highest to 0.
        self.chapter_c.trim(8)
        assert(self.chapter_c.highest==0), \
            self.fail("Problem with highest on trim(2)")
class TestChapterW(unittest.TestCase):
    """Unit tests for ChapterW: the pitch-wheel chapter of the recovery
    journal (two octets holding the coarse/fine wheel values)."""

    def setUp(self):
        self.chapter_w = ChapterW()
        # Two pitch-wheel commands (status 224) from packet seqno 6.
        self.plist = [[[224, 0, 120], 6], [[224, 1, 110], 6]]

    def test_update(self):
        """Test create chapter W"""
        self.chapter_w.update(self.plist)
        encoded = self.chapter_w.content
        assert(type(encoded)==str), self.fail("Wrong type returned")
        assert(len(encoded)==2), \
            self.fail("Wrong size for chapter W part in recovery journal")

    def test_parse(self):
        # Round-trip: both wheel values and the S bit must be recovered.
        self.chapter_w.update(self.plist)
        size, wheels, mark_s = self.chapter_w.parse(self.chapter_w.content)
        assert(mark_s == 1), \
            self.fail("Wrong value for S bit in Chapter W")
        assert(wheels[0][2]==120), \
            self.fail("Wrong value for wheel_1 in Chapter W")
        assert(wheels[1][2]==110), \
            self.fail("Wrong value for wheel_2 in Chapter W")

    def test_trim(self):
        # Trimming the only recorded packet must clear the data and highest.
        self.chapter_w.update(self.plist)
        self.chapter_w.trim(6)
        for entry in self.chapter_w.data_list:
            assert(entry[0]==0), self.fail("Problem trimming chapter")
        assert(self.chapter_w.highest==0), self.fail("Wrong update for highest")
class TestChapterN(unittest.TestCase):
    """Unit tests for ChapterN: the note chapter (header, 2-octet note-on
    logs, note-off bitfield, and update/trim/highest bookkeeping).

    NOTE(review): Python 2 semantics — ``/`` is integer division in the
    expected-length computations below.
    """

    def setUp(self):
        self.chapter_n = ChapterN()
        self.plist_on = []
        self.plist_off = []
        #List of notes to test
        #Note on (status 144), packet seqno 6
        for i in range(127):
            self.plist_on.append([[144, i, 100],6])
        #Note off (status 128), packet seqno 7
        for i in range(127):
            self.plist_off.append([[128, i, 100],7])

    def test_header(self):
        """Test Create header of chapterN """
        #Creating chapter
        self.chapter_n.update(self.plist_on)
        res = self.chapter_n.header()
        #length type test: header is 2 octets
        assert(len(res)==2), self.fail("length of header is not good")
        assert(type(res)==str), self.fail("Wrong type return")

    def test_parse_header(self):
        """Test parse header of ChapterN"""
        #Creating chapter
        self.chapter_n.update(self.plist_off)
        res = self.chapter_n.header()
        #Parsing
        res_parsed = self.chapter_n.parse_header(res)
        #Testing type
        assert(type(res_parsed)==tuple), self.fail("Wrong type return")
        #Testing content: LEN is 0 (no note-on logs), LOW/HIGH bound the
        #note-off bitfield octets.
        assert(res_parsed[1]==0), \
            self.fail("Problem getting good value of LEN")
        assert(res_parsed[2]==0), \
            self.fail("Problem getting good value of LOW")
        assert(res_parsed[3]==15), \
            self.fail("Problem getting good value of HIGH")

    def test_update(self):
        """Update with 127 note_off"""
        self.chapter_n.update(self.plist_off)
        #Test len content: 16 bitfield octets (Python 2: 128 / 8) + 2 header
        length_wait = 128 / 8 + 2
        assert(len(self.chapter_n.content)==length_wait), \
            self.fail("Wrong size for chapter encoded returned")
        #Test note_on
        assert(len(self.chapter_n.note_on)==0), \
            self.fail("Wrong nb of note on recorded")
        #Test note_off
        assert(len(self.chapter_n.note_off)==127), \
            self.fail("Wrong nb of note off recorded")
        #Test low
        assert(self.chapter_n.low==0), self.fail("Wrong low calculation")
        #Test high
        assert(self.chapter_n.high==15), self.fail("Wrong high calculation")
        #Test highest (the note-off packets carry seqno 7)
        assert(self.chapter_n.highest==7), self.fail("Wrong highest saved")

    def test_update_1(self):
        """Update with 127 note_on"""
        self.chapter_n.update(self.plist_on)
        #Test len content: 2 octets per note-on log + 2 header octets
        length_wait = 127 * 2 + 2
        assert(len(self.chapter_n.content)==length_wait), \
            self.fail("Wrong size for chapter encoded returned")
        #Test note_on
        assert(len(self.chapter_n.note_on)==127), \
            self.fail("Wrong nb of note on recorded")
        #Test note_off
        assert(len(self.chapter_n.note_off)==0), \
            self.fail("Wrong nb of note off recorded")
        #Test low
        assert(self.chapter_n.low==0), self.fail("Wrong low calculation")
        #Test high
        assert(self.chapter_n.high==0), self.fail("Wrong high calculation")
        #Test highest (the note-on packets carry seqno 6)
        assert(self.chapter_n.highest==6), self.fail("Wrong highest saved")

    def test_update_2(self):
        """Update with note_on / off and ..."""
        #A duplicate note-on must replace its previous entry (still 127).
        self.plist_on.append([[144, 42, 100],6])
        self.chapter_n.update(self.plist_on)
        #Test len content
        length_wait = 127 * 2 + 2
        assert(len(self.chapter_n.content)==length_wait), \
            self.fail("Wrong size for chapter encoded returned")
        assert(len(self.chapter_n.note_on)==127), \
            self.fail("Wrong nb of note on recorded")
        #A later note-off update must move every note to the off bitfield.
        self.chapter_n.update(self.plist_off)
        #Test len content
        length_wait = 128 / 8 + 2
        assert(len(self.chapter_n.content)==length_wait), \
            self.fail("Wrong size for chapter encoded returned")
        #Test note_on
        assert(len(self.chapter_n.note_on)==0), \
            self.fail("Wrong nb of note on recorded")
        #Test note_off
        assert(len(self.chapter_n.note_off)==127), \
            self.fail("Wrong nb of note off recorded")

    def test_parse(self):
        """ Test parse chapter N with several notes"""
        #creating chapter
        self.chapter_n.update(self.plist_off)
        size, notes_parsed = self.chapter_n.parse(self.chapter_n.content)
        assert(len(notes_parsed)==127), self.fail("Wrong number of notes returned")
        assert(size==18), self.fail("Wrong size of encoded chapter")

    def test_parse_2(self):
        #Single note-off: smoke-tests parse() on a sparse bitfield
        #(no assertions — the original only checks it does not raise).
        off_mont = [[[128, 62, 100],1000]]
        self.chapter_n.update(off_mont)
        size, notes_parsed = self.chapter_n.parse(self.chapter_n.content)

    def test_trim(self):
        self.chapter_n.update(self.plist_off)
        #trim(6) removes nothing: all note-offs carry seqno 7.
        self.chapter_n.trim(6)
        #Test highest
        assert(self.chapter_n.highest==7), \
            self.fail("Wrong highest saved")
        #Test note_on
        assert(len(self.chapter_n.note_on)==0), \
            self.fail("Wrong nb of note on recorded")
        #Test note_off
        assert(len(self.chapter_n.note_off)==127), \
            self.fail("Wrong nb of note off recorded")
        #trim(7) drops them all.
        self.chapter_n.trim(7)
        assert(len(self.chapter_n.note_off)==0), \
            self.fail("Wrong nb of note off recorded after trim")

    def test_update_highest(self):
        plist = []
        plist.append([[144, 1, 100],6])
        plist.append([[144, 1, 100],7])
        plist.append([[144, 1, 100],8])
        self.chapter_n.update(plist)
        #highest follows the newest packet seqno.
        assert(self.chapter_n.highest==8), \
            self.fail("wrong update of highest on update")
        self.chapter_n.trim(7)
        assert(self.chapter_n.highest==8), \
            self.fail("wrong update of highest on trim")
        #Trimming the last packet resets highest.
        self.chapter_n.trim(8)
        assert(self.chapter_n.highest==0), \
            self.fail("wrong update of highest on trim")
class TestChapterT(unittest.TestCase):
    """Unit tests for ChapterT: the channel-aftertouch chapter (a single
    octet carrying the pressure value)."""

    def setUp(self):
        self.chap_t = ChapterT()

    def test_update(self):
        """Test Create Chapter T (After Touch)"""
        # One channel-aftertouch command (status 208) from packet 1000.
        commands = [[[208, 80, 98], 1000]]
        self.chap_t.update(commands)
        encoded = self.chap_t.content
        assert(type(encoded)==str), self.fail("Wrong type returned")
        assert(len(encoded) == 1), self.fail("Wrong size returned")
        assert(self.chap_t.highest==1000), self.fail("Problem with highest update")

    def test_parse(self):
        """Test parse Chapter T"""
        self.chap_t.update( [[[208, 80, 0], 1000]])
        size, midi_cmd = self.chap_t.parse(self.chap_t.content)
        assert(size==1), self.fail("Wrong size returned")
        assert(midi_cmd[0][1]==80), self.fail("Wrong value returned for pressure")
class TestChapterA(unittest.TestCase):
    """Unit tests for ChapterA: the poly-aftertouch chapter (header,
    2-octet per-note logs, and update/trim/highest bookkeeping)."""

    def setUp(self):
        self.chap_a = ChapterA()

    def test_header(self):
        """Test header for Chapter A"""
        res = self.chap_a.header(1, 127)
        assert(type(res)==str), self.fail("Wrong type returned")
        #Header is a single octet.
        assert(len(res)==1), self.fail("Wrong size returned")

    def test_parse_header(self):
        """Test parse header Chapter A"""
        res = self.chap_a.header(1, 127)
        marker_s, length = self.chap_a.parse_header(res)
        assert(marker_s==1), self.fail("Wrong value returned for marker S")
        assert(length==127), self.fail("Wrong value returned for length")

    def test_create_log_a(self):
        """Test Create log A"""
        res = self.chap_a.create_log_a(1, 127, 1, 127)
        assert(type(res)==str), self.fail("Wrong type returned")
        #Each log is 2 octets.
        assert(len(res)==2), self.fail("Wrong size returned")

    def test_parse_log_a(self):
        """Test Parse log A"""
        res = self.chap_a.create_log_a(1, 127, 1, 110)
        #Fields: (S marker, note number, X marker, pressure)
        marker_s, notenum, marker_x, pressure = self.chap_a.parse_log_a(res)
        assert(marker_s==1), self.fail("Wrong value returned for marker S")
        assert(notenum==127), self.fail("Wrong value returned for length")
        assert(marker_x==1), self.fail("Wrong value returned for marker S")
        assert(pressure==110), self.fail("Wrong value returned for length")

    def test_update(self):
        """Test create Chapter A"""
        # 160 = poly aftertouch status byte; seqno 1000.
        midi_cmd = [[[160, 80, 98], 1000], [[160, 82, 90], 1000]]
        self.chap_a.update(midi_cmd)
        res = self.chap_a.content
        #1 header octet + 2 octets per command.
        len_expected = 1 + 2 * len(midi_cmd)
        assert(type(res)==str), self.fail("Wrong type returned")
        assert(len(res)==len_expected), self.fail("Wrong size returned")

    def test_update_1(self):
        """Test create Chapter A with a big amount of commands"""
        #With 127 notes (max is 127)
        midi_cmd = []
        for i in range(127):
            midi_cmd.append([[160, i, 98], 1])
        self.chap_a.update(midi_cmd)
        #Test content
        res = self.chap_a.content
        size, marker_s, midi_cmd_parsed = self.chap_a.parse(res)
        size_waited = 1 + 2 *127
        assert(size==size_waited), self.fail("Wrong size returned for 127 notes(1) !")
        #A repeated note must replace its entry, keeping the size capped.
        midi_cmd = []
        midi_cmd.append([[160, 42, 98], 2])
        self.chap_a.update(midi_cmd)
        #Test content
        res = self.chap_a.content
        size, marker_s, midi_cmd_parsed = self.chap_a.parse(res)
        assert(size==size_waited), self.fail("Wrong size returned for 127 notes(2) !")

    def test_update_2(self):
        """Test create Chapter A with a big amount of commands
        in a lonely function call"""
        #With 127 notes (max is 127) — duplicated in one list to check
        #de-duplication within a single update() call.
        midi_cmd = []
        for i in range(127):
            midi_cmd.append([[160, i, 98], 1])
        for i in range(127):
            midi_cmd.append([[160, i, 98], 1])
        self.chap_a.update(midi_cmd)
        #Test content
        res = self.chap_a.content
        size, marker_s, midi_cmd_parsed = self.chap_a.parse(res)
        size_waited = 1 + 2 *127
        assert(size==size_waited), self.fail("Wrong size returned for 127 notes(1) !")

    def test_parse(self):
        """Test parsing chapterA"""
        midi_cmd = [[[160, 80, 98], 1000], [[160, 82, 90], 1000]]
        self.chap_a.update(midi_cmd)
        res = self.chap_a.content
        size, marker_s, midi_cmd_parsed = self.chap_a.parse(res)
        assert(size==5), self.fail("Wrong value for size returned")
        assert(marker_s==1), self.fail("Wrong value for marker_s returned")
        # NOTE(review): compares midi_cmd with itself — probably meant
        # len(midi_cmd_parsed); kept as-is to preserve behavior.
        assert(len(midi_cmd)==len(midi_cmd)), self.fail("Wrong size returned")
        for i in range(len(midi_cmd)):
            assert(midi_cmd[i][0]==midi_cmd_parsed[i]), \
                self.fail("Wrong value returned")

    def test_trim(self):
        """Test trim without note remplacement"""
        #Adding Packet 1000
        midi_cmd = [[[160, 80, 98], 1000], [[160, 82, 90], 1000]]
        self.chap_a.update(midi_cmd)
        #Adding Packet 1001
        midi_cmd = [[[160, 84, 98], 1001], [[160, 86, 90], 1001]]
        self.chap_a.update(midi_cmd)
        #Adding Packet 1002
        midi_cmd = [[[160, 88, 98], 1002], [[160, 90, 90], 1002]]
        self.chap_a.update(midi_cmd)
        #Drop packets <= 1001; only checks parse() still succeeds.
        self.chap_a.trim(1001)
        res = self.chap_a.parse(self.chap_a.content)

    def test_update_highest(self):
        #Adding Packet 1000
        midi_cmd = [[[160, 80, 98], 1000], [[160, 82, 90], 1000]]
        self.chap_a.update(midi_cmd)
        self.chap_a.update_highest()
        assert(self.chap_a.highest==1000), \
            self.fail("Update problem for highest after an update")
        #Adding Packet 1001
        midi_cmd = [[[160, 84, 98], 1001], [[160, 86, 90], 1001]]
        self.chap_a.update(midi_cmd)
        self.chap_a.update_highest()
        assert(self.chap_a.highest==1001), \
            self.fail("Update problem for highest after an update")
        #Trimming everything resets highest to 0.
        self.chap_a.trim(1001)
        assert(self.chap_a.highest==0), \
            self.fail("Update problem for highest after an trim")
|
""" user data tab """
try:
from taurus.external.qt import Qt
except Exception:
from taurus.qt import Qt
from .EdListDlg import EdListWg
import logging
logger = logging.getLogger(__name__)
class Data(Qt.QObject):
    """ User data tab widget

    Hosts an :class:`EdListWg` table inside the ``ui.data`` tab and
    relays its `dirty` signal to the owner.
    """

    #: (:class:`taurus.qt.Qt.pyqtSignal`) dirty signal
    dirty = Qt.pyqtSignal()

    def __init__(self, ui, state=None, simpleMode=False):
        """ constructor

        :param ui: ui instance
        :type ui: :class:`taurus.qt.qtgui.util.ui.__UI`
        :param state: server state
        :type state: :class:`nxsselector.ServerState.ServerState`
        :param simpleMode: if simple display mode
        :type simpleMode: :obj:`bool`
        """
        Qt.QObject.__init__(self)
        #: (:class:`taurus.qt.qtgui.util.ui.__UI`) ui instance
        self.ui = ui
        #: (:class:`nxsselector.ServerState.ServerState`) server state
        self.state = state
        #: (:class:`taurus.qt.Qt.QLayout`)
        self.glayout = None
        #: (:obj:`bool`) if simple view mode
        self.__simpleMode = simpleMode
        #: (:class:`nxsselector.EdListWg.EdListWg`) table editing widget
        self.form = EdListWg(self.ui.data)

    def createGUI(self):
        """ creates widget GUI

        Safe to call repeatedly: on a rebuild the old layout is emptied
        and the previous `dirty` connection is dropped before re-wiring.
        """
        self.ui.data.hide()
        if self.glayout:
            # Rebuild path: drain the layout item by item, detaching any
            # widgets it still holds.
            child = self.glayout.takeAt(0)
            while child:
                self.glayout.removeItem(child)
                if isinstance(child, Qt.QWidgetItem):
                    self.glayout.removeWidget(child.widget())
                child = self.glayout.takeAt(0)
            # Disconnect before createGUI()/connect below to avoid a
            # duplicate signal-slot connection.
            self.form.dirty.disconnect(self.__setDirty)
        else:
            self.glayout = Qt.QHBoxLayout(self.ui.data)
        if self.form:
            self.form.setParent(None)
        if self.__simpleMode:
            # In simple mode administrative entries are disabled for editing.
            self.form.disable = self.state.admindata
        self.form.record = self.state.datarecord
        names = self.state.clientRecords()
        logger.debug("NAMES: %s " % names)
        self.form.available_names = names
        self.form.createGUI()
        self.glayout.addWidget(self.form)
        self.ui.data.update()
        # Only reveal the tab content when it is the active tab.
        if self.ui.tabWidget.currentWidget() == self.ui.data:
            self.ui.data.show()
        self.form.dirty.connect(self.__setDirty)

    def reset(self):
        """ recreates widget GUI
        """
        self.createGUI()

    @Qt.pyqtSlot()
    def __setDirty(self):
        """ emits the `dirty` signal
        """
        self.dirty.emit()
|
import json
import sys
import os
import urllib
import logging
import re
import time
import errno
import uuid
import datetime
from bs4 import BeautifulSoup
import geoserver
import httplib2
from urlparse import urlparse
from urlparse import urlsplit
from threading import local
from collections import namedtuple
from itertools import cycle, izip
from lxml import etree
import xml.etree.ElementTree as ET
from decimal import Decimal
from owslib.wcs import WebCoverageService
from owslib.util import http_post
from django.core.exceptions import ImproperlyConfigured
from django.contrib.contenttypes.models import ContentType
from django.db.models.signals import pre_delete
from django.template.loader import render_to_string
from django.conf import settings
from django.utils.translation import ugettext as _
from dialogos.models import Comment
from agon_ratings.models import OverallRating
from gsimporter import Client
from owslib.wms import WebMapService
from geoserver.store import CoverageStore, DataStore, datastore_from_index,\
coveragestore_from_index, wmsstore_from_index
from geoserver.workspace import Workspace
from geoserver.catalog import Catalog
from geoserver.catalog import FailedRequestError, UploadError
from geoserver.catalog import ConflictingDataError
from geoserver.resource import FeatureType, Coverage
from geoserver.support import DimensionInfo
from geonode import GeoNodeException
from geonode.layers.utils import layer_type, get_files
from geonode.layers.models import Layer, Attribute, Style
from geonode.layers.enumerations import LAYER_ATTRIBUTE_NUMERIC_DATA_TYPES
logger = logging.getLogger(__name__)
# Fail fast at import time: geonode.geoserver is unusable without OGC_SERVER.
if not hasattr(settings, 'OGC_SERVER'):
    raise ImproperlyConfigured(
        'Please configure OGC_SERVER when enabling geonode.geoserver.'
        ' More info can be found at '
        'http://docs.geonode.org/en/master/reference/developers/settings.html#ogc-server')
def check_geoserver_is_up():
    """Verifies all geoserver is running,
    this is needed to be able to upload.

    Issues a GET against the GeoServer web UI and fails when the response
    status is anything but 200.
    """
    url = "%sweb/" % ogc_server_settings.LOCATION
    resp, content = http_client.request(url, "GET")
    msg = ('Cannot connect to the GeoServer at %s\nPlease make sure you '
           'have started it.' % ogc_server_settings.LOCATION)
    # NOTE(review): `assert` statements are stripped under `python -O`, so
    # this health check silently vanishes in optimized runs — consider an
    # explicit raise instead (would change the exception type callers see).
    assert resp['status'] == '200', msg
def _add_sld_boilerplate(symbolizer):
"""
Wrap an XML snippet representing a single symbolizer in the appropriate
elements to make it a valid SLD which applies that symbolizer to all features,
including format strings to allow interpolating a "name" variable in.
"""
return """
<StyledLayerDescriptor version="1.0.0" xmlns="http://www.opengis.net/sld" xmlns:ogc="http://www.opengis.net/ogc"
xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.opengis.net/sld http://schemas.opengis.net/sld/1.0.0/StyledLayerDescriptor.xsd">
<NamedLayer>
<Name>%(name)s</Name>
<UserStyle>
<Name>%(name)s</Name>
<Title>%(name)s</Title>
<FeatureTypeStyle>
<Rule>
""" + symbolizer + """
</Rule>
</FeatureTypeStyle>
</UserStyle>
</NamedLayer>
</StyledLayerDescriptor>
"""
_raster_template = """
<RasterSymbolizer>
<Opacity>1.0</Opacity>
</RasterSymbolizer>
"""
_polygon_template = """
<PolygonSymbolizer>
<Fill>
<CssParameter name="fill">%(bg)s</CssParameter>
</Fill>
<Stroke>
<CssParameter name="stroke">%(fg)s</CssParameter>
<CssParameter name="stroke-width">0.7</CssParameter>
</Stroke>
</PolygonSymbolizer>
"""
_line_template = """
<LineSymbolizer>
<Stroke>
<CssParameter name="stroke">%(bg)s</CssParameter>
<CssParameter name="stroke-width">3</CssParameter>
</Stroke>
</LineSymbolizer>
</Rule>
</FeatureTypeStyle>
<FeatureTypeStyle>
<Rule>
<LineSymbolizer>
<Stroke>
<CssParameter name="stroke">%(fg)s</CssParameter>
</Stroke>
</LineSymbolizer>
"""
_point_template = """
<PointSymbolizer>
<Graphic>
<Mark>
<WellKnownName>%(mark)s</WellKnownName>
<Fill>
<CssParameter name="fill">%(bg)s</CssParameter>
</Fill>
<Stroke>
<CssParameter name="stroke">%(fg)s</CssParameter>
</Stroke>
</Mark>
<Size>10</Size>
</Graphic>
</PointSymbolizer>
"""
_style_templates = dict(
raster=_add_sld_boilerplate(_raster_template),
polygon=_add_sld_boilerplate(_polygon_template),
line=_add_sld_boilerplate(_line_template),
point=_add_sld_boilerplate(_point_template)
)
def _style_name(resource):
    """Build a workspace-qualified style name for *resource*, collapsing
    punctuation to underscores via the module-level ``_punc`` regex."""
    qualified = resource.store.workspace.name + ":" + resource.name
    return _punc.sub("_", qualified)
def get_sld_for(layer):
    """Return a generated SLD document for *layer*, or None when the
    layer's default style is not one of the known templates (in which
    case the caller keeps the existing custom SLD)."""
    # FIXME: GeoServer sometimes fails to associate a style with the data, so
    # for now we default to using a point style.(it works for lines and
    # polygons, hope this doesn't happen for rasters though)
    name = layer.default_style.name if layer.default_style is not None else "point"
    # FIXME: When gsconfig.py exposes the default geometry type for vector
    # layers we should use that rather than guessing based on the auto-detected
    # style.
    if name in _style_templates:
        # _style_contexts is presumably a module-level cycling iterator of
        # (fg, bg, mark) tuples — `.next()` is the Python 2 iterator
        # protocol; TODO confirm (it is defined outside this view).
        fg, bg, mark = _style_contexts.next()
        return _style_templates[name] % dict(
            name=layer.name,
            fg=fg,
            bg=bg,
            mark=mark)
    else:
        return None
def fixup_style(cat, resource, style):
    """Replace template default styles on the layers of *resource* with
    newly created per-layer styles.

    :param cat: a ``geoserver.catalog.Catalog`` client
    :param resource: the resource whose layers are inspected
    :param style: optional file-like object whose ``read()`` yields SLD
        content; when None an SLD is generated via get_sld_for()
    """
    logger.debug("Creating styles for layers associated with [%s]", resource)
    layers = cat.get_layers(resource=resource)
    logger.info("Found %d layers associated with [%s]", len(layers), resource)
    for lyr in layers:
        if lyr.default_style.name in _style_templates:
            logger.info("%s uses a default style, generating a new one", lyr)
            name = _style_name(resource)
            if style is None:
                sld = get_sld_for(lyr)
            else:
                sld = style.read()
            logger.info("Creating style [%s]", name)
            # NOTE(review): this rebinds the `style` parameter, so from the
            # second layer onward the `style is None` branch above is skipped
            # and `style.read()` is called on the object returned by
            # create_style() — looks unintended; confirm before relying on
            # multi-layer resources.
            style = cat.create_style(name, sld)
            lyr.default_style = cat.get_style(name)
            logger.info("Saving changes to %s", lyr)
            cat.save(lyr)
            logger.info("Successfully updated %s", lyr)
def cascading_delete(cat, layer_name):
    """Delete a layer and its dependent objects (resource, non-default
    styles, and — when safe — its store) from GeoServer.

    :param cat: a ``geoserver.catalog.Catalog`` client
    :param layer_name: layer name, optionally workspace-qualified
        ("workspace:name")
    :returns: None.  Silently returns when the workspace, store or
        resource cannot be found; returns early (keeping the store) for
        GeoGig-backed stores.
    """
    resource = None
    try:
        if layer_name.find(':') != -1:
            # Workspace-qualified name: resolve workspace and store first.
            workspace, name = layer_name.split(':')
            ws = cat.get_workspace(workspace)
            try:
                store = get_store(cat, name, workspace=ws)
            except FailedRequestError:
                # Fall back to the configured DATASTORE when the layer does
                # not live in a store of its own name.
                if ogc_server_settings.DATASTORE:
                    try:
                        store = get_store(cat, ogc_server_settings.DATASTORE, workspace=ws)
                    except FailedRequestError:
                        logger.debug(
                            'the store was not found in geoserver')
                        return
                else:
                    logger.debug(
                        'the store was not found in geoserver')
                    return
            if ws is None:
                logger.debug(
                    'cascading delete was called on a layer where the workspace was not found')
                return
            resource = cat.get_resource(name, store=store, workspace=workspace)
        else:
            resource = cat.get_resource(layer_name)
    except EnvironmentError as e:
        if e.errno == errno.ECONNREFUSED:
            msg = ('Could not connect to geoserver at "%s"'
                   'to save information for layer "%s"' % (
                       ogc_server_settings.LOCATION, layer_name)
                   )
            logger.warn(msg, e)
            return None
        else:
            # Re-raise preserving the original traceback (bare `raise`
            # instead of `raise e`).
            raise
    if resource is None:
        # If there is no associated resource,
        # this method can not delete anything.
        # Let's return and make a note in the log.
        logger.debug(
            'cascading_delete was called with a non existent resource')
        return
    resource_name = resource.name
    lyr = cat.get_layer(resource_name)
    if (lyr is not None):  # None means the layer was already deleted
        store = resource.store
        styles = lyr.styles + [lyr.default_style]
        cat.delete(lyr)
        for s in styles:
            if s is not None and s.name not in _default_style_names:
                try:
                    cat.delete(s, purge='true')
                except FailedRequestError as e:
                    # Trying to delete a shared style will fail
                    # We'll catch the exception and log it.
                    logger.debug(e)
        # Due to a possible bug of geoserver, we need this trick for now
        # TODO: inspect the issue reported by this hack. Should be solved
        # with GS 2.7+
        try:
            cat.delete(resource, recurse=True)  # This may fail
        except Exception:
            # FIX: was a bare `except:`, which would also swallow
            # SystemExit/KeyboardInterrupt.
            cat.reload()  # this preserves the integrity of geoserver
        if store.resource_type == 'dataStore' and 'dbtype' in store.connection_parameters and \
                store.connection_parameters['dbtype'] == 'postgis':
            delete_from_postgis(resource_name)
        elif store.type and store.type.lower() == 'geogig':
            # Prevent the entire store from being removed when the store is a
            # GeoGig repository.
            return
        else:
            if store.resource_type == 'coverageStore':
                try:
                    logger.info(" - Going to purge the " + store.resource_type + " : " + store.href)
                    cat.reset()  # this resets the coverage readers and unlocks the files
                    cat.delete(store, purge='all', recurse=True)
                    cat.reload()  # this preserves the integrity of geoserver
                except FailedRequestError as e:
                    # Trying to recursively purge a store may fail
                    # We'll catch the exception and log it.
                    logger.debug(e)
            else:
                try:
                    # Only remove an empty store.
                    if not store.get_resources():
                        cat.delete(store, recurse=True)
                except FailedRequestError as e:
                    # Catch the exception and log it.
                    logger.debug(e)
def delete_from_postgis(resource_name):
    """
    Delete a table from PostGIS (because Geoserver won't do it yet);
    to be used after deleting a layer from the system.

    :param resource_name: name of the PostGIS table to drop
    """
    import psycopg2
    db = ogc_server_settings.datastore_db
    # Keyword arguments avoid the quoting bugs of the previous hand-built
    # DSN string (e.g. passwords containing quotes).
    conn = psycopg2.connect(
        dbname=db['NAME'],
        user=db['USER'],
        password=db['PASSWORD'],
        port=db['PORT'],
        host=db['HOST'])
    try:
        cur = conn.cursor()
        # FIX: parameterized query instead of `%` string interpolation —
        # resource_name originates outside this function and must not be
        # spliced into SQL text (SQL injection).
        cur.execute("SELECT DropGeometryTable (%s)", [resource_name])
        conn.commit()
    except Exception as e:
        logger.error(
            "Error deleting PostGIS table %s:%s",
            resource_name,
            str(e))
    finally:
        conn.close()
def gs_slurp(
        ignore_errors=True,
        verbosity=1,
        console=None,
        owner=None,
        workspace=None,
        store=None,
        filter=None,
        skip_unadvertised=False,
        skip_geonode_registered=False,
        remove_deleted=False):
    """Configure the layers available in GeoServer in GeoNode.
    It returns a list of dictionaries with the name of the layer,
    the result of the operation and the errors and traceback if it failed.

    :param ignore_errors: keep going after a per-layer failure instead of
        raising.
    :param verbosity: 0 = silent, 1 = per-layer progress, >1 = extra info.
    :param console: writable stream for progress output (os.devnull if None).
    :param owner: user assigned as owner of newly created layers.
    :param workspace: restrict to one GeoServer workspace (name).
    :param store: restrict to one GeoServer store (name).
    :param filter: substring that layer names must contain.
    :param skip_unadvertised: ignore resources not advertised in GeoServer.
    :param skip_geonode_registered: ignore resources already in GeoNode.
    :param remove_deleted: also delete GeoNode layers that no longer exist
        in GeoServer.
    """
    if console is None:
        console = open(os.devnull, 'w')
    if verbosity > 1:
        print >> console, "Inspecting the available layers in GeoServer ..."
    cat = Catalog(ogc_server_settings.internal_rest, _user, _password)
    # Resolve the resources to inspect from the (workspace, store) filters.
    if workspace is not None:
        workspace = cat.get_workspace(workspace)
        if workspace is None:
            resources = []
        else:
            # obtain the store from within the workspace. if it exists, obtain resources
            # directly from store, otherwise return an empty list:
            if store is not None:
                store = get_store(cat, store, workspace=workspace)
                if store is None:
                    resources = []
                else:
                    resources = cat.get_resources(store=store)
            else:
                resources = cat.get_resources(workspace=workspace)
    elif store is not None:
        store = get_store(cat, store)
        resources = cat.get_resources(store=store)
    else:
        resources = cat.get_resources()
    if remove_deleted:
        # Snapshot BEFORE the name filter below: deletion comparison must
        # disregard the `filter` argument.
        resources_for_delete_compare = resources[:]
        workspace_for_delete_compare = workspace
        # filter out layers for delete comparison with GeoNode layers by following criteria:
        # enabled = true, if --skip-unadvertised: advertised = true, but
        # disregard the filter parameter in the case of deleting layers
        resources_for_delete_compare = [
            k for k in resources_for_delete_compare if k.enabled in ["true", True]]
        if skip_unadvertised:
            resources_for_delete_compare = [
                k for k in resources_for_delete_compare if k.advertised in ["true", True]]
    if filter:
        resources = [k for k in resources if filter in k.name]
    # filter out layers depending on enabled, advertised status:
    resources = [k for k in resources if k.enabled in ["true", True]]
    if skip_unadvertised:
        resources = [k for k in resources if k.advertised in ["true", True]]
    # filter out layers already registered in geonode
    layer_names = Layer.objects.all().values_list('typename', flat=True)
    if skip_geonode_registered:
        resources = [k for k in resources
                     if not '%s:%s' % (k.workspace.name, k.name) in layer_names]
    # TODO: Should we do something with these?
    # i.e. look for matching layers in GeoNode and also disable?
    # disabled_resources = [k for k in resources if k.enabled == "false"]
    number = len(resources)
    if verbosity > 1:
        msg = "Found %d layers, starting processing" % number
        print >> console, msg
    output = {
        'stats': {
            'failed': 0,
            'updated': 0,
            'created': 0,
            'deleted': 0,
        },
        'layers': [],
        'deleted_layers': []
    }
    start = datetime.datetime.now()
    # Main sync loop: get_or_create a GeoNode Layer per GeoServer resource.
    for i, resource in enumerate(resources):
        name = resource.name
        the_store = resource.store
        workspace = the_store.workspace
        try:
            layer, created = Layer.objects.get_or_create(name=name, defaults={
                "workspace": workspace.name,
                "store": the_store.name,
                "storeType": the_store.resource_type,
                "typename": "%s:%s" % (workspace.name.encode('utf-8'), resource.name.encode('utf-8')),
                "title": resource.title or 'No title provided',
                "abstract": resource.abstract or 'No abstract provided',
                "owner": owner,
                "uuid": str(uuid.uuid4()),
                "bbox_x0": Decimal(resource.latlon_bbox[0]),
                "bbox_x1": Decimal(resource.latlon_bbox[1]),
                "bbox_y0": Decimal(resource.latlon_bbox[2]),
                "bbox_y1": Decimal(resource.latlon_bbox[3])
            })
            # recalculate the layer statistics
            set_attributes(layer, overwrite=True)
            # Fix metadata links if the ip has changed
            if layer.link_set.metadata().count() > 0:
                if not created and settings.SITEURL not in layer.link_set.metadata()[0].url:
                    layer.link_set.metadata().delete()
                    layer.save()
                    metadata_links = []
                    for link in layer.link_set.metadata():
                        metadata_links.append((link.mime, link.name, link.url))
                    resource.metadata_links = metadata_links
                    cat.save(resource)
        except Exception as e:
            if ignore_errors:
                status = 'failed'
                exception_type, error, traceback = sys.exc_info()
            else:
                if verbosity > 0:
                    msg = "Stopping process because --ignore-errors was not set and an error was found."
                    print >> sys.stderr, msg
                raise Exception(
                    'Failed to process %s' %
                    resource.name.encode('utf-8'), e), None, sys.exc_info()[2]
        else:
            if created:
                layer.set_default_permissions()
                status = 'created'
                output['stats']['created'] += 1
            else:
                status = 'updated'
                output['stats']['updated'] += 1
        msg = "[%s] Layer %s (%d/%d)" % (status, name, i + 1, number)
        info = {'name': name, 'status': status}
        if status == 'failed':
            output['stats']['failed'] += 1
            info['traceback'] = traceback
            info['exception_type'] = exception_type
            info['error'] = error
        output['layers'].append(info)
        if verbosity > 0:
            print >> console, msg
    if remove_deleted:
        # Build the GeoNode queryset constrained by the same workspace/store
        # the GeoServer resources were gathered from.
        q = Layer.objects.filter()
        if workspace_for_delete_compare is not None:
            if isinstance(workspace_for_delete_compare, Workspace):
                q = q.filter(
                    workspace__exact=workspace_for_delete_compare.name)
            else:
                q = q.filter(workspace__exact=workspace_for_delete_compare)
        if store is not None:
            if isinstance(
                    store,
                    CoverageStore) or isinstance(
                    store,
                    DataStore):
                q = q.filter(store__exact=store.name)
            else:
                q = q.filter(store__exact=store)
        logger.debug("Executing 'remove_deleted' logic")
        logger.debug("GeoNode Layers Found:")
        # compare the list of GeoNode layers obtained via query/filter with valid resources found in GeoServer
        # filtered per options passed to updatelayers: --workspace, --store, --skip-unadvertised
        # add any layers not found in GeoServer to deleted_layers (must match
        # workspace and store as well):
        deleted_layers = []
        for layer in q:
            logger.debug(
                "GeoNode Layer info: name: %s, workspace: %s, store: %s",
                layer.name,
                layer.workspace,
                layer.store)
            layer_found_in_geoserver = False
            for resource in resources_for_delete_compare:
                # if layer.name matches a GeoServer resource, check also that
                # workspace and store match, mark valid:
                if layer.name == resource.name:
                    if layer.workspace == resource.workspace.name and layer.store == resource.store.name:
                        logger.debug(
                            "Matches GeoServer layer: name: %s, workspace: %s, store: %s",
                            resource.name,
                            resource.workspace.name,
                            resource.store.name)
                        layer_found_in_geoserver = True
            if not layer_found_in_geoserver:
                logger.debug(
                    "----- Layer %s not matched, marked for deletion ---------------",
                    layer.name)
                deleted_layers.append(layer)
        number_deleted = len(deleted_layers)
        if verbosity > 1:
            msg = "\nFound %d layers to delete, starting processing" % number_deleted if number_deleted > 0 else \
                "\nFound %d layers to delete" % number_deleted
            print >> console, msg
        for i, layer in enumerate(deleted_layers):
            logger.debug(
                "GeoNode Layer to delete: name: %s, workspace: %s, store: %s",
                layer.name,
                layer.workspace,
                layer.store)
            try:
                # delete ratings, comments, and taggit tags:
                ct = ContentType.objects.get_for_model(layer)
                OverallRating.objects.filter(
                    content_type=ct,
                    object_id=layer.id).delete()
                Comment.objects.filter(
                    content_type=ct,
                    object_id=layer.id).delete()
                layer.keywords.clear()
                layer.delete()
                output['stats']['deleted'] += 1
                status = "delete_succeeded"
            except Exception as e:
                status = "delete_failed"
            finally:
                # Re-attach the pre_delete signal handler regardless of
                # whether the deletion succeeded.
                from .signals import geoserver_pre_delete
                pre_delete.connect(geoserver_pre_delete, sender=Layer)
            msg = "[%s] Layer %s (%d/%d)" % (status,
                                             layer.name,
                                             i + 1,
                                             number_deleted)
            info = {'name': layer.name, 'status': status}
            if status == "delete_failed":
                exception_type, error, traceback = sys.exc_info()
                info['traceback'] = traceback
                info['exception_type'] = exception_type
                info['error'] = error
            output['deleted_layers'].append(info)
            if verbosity > 0:
                print >> console, msg
    finish = datetime.datetime.now()
    td = finish - start
    output['stats']['duration_sec'] = td.microseconds / \
        1000000 + td.seconds + td.days * 24 * 3600
    return output
def get_stores(store_type=None):
    """Return ``[{'name': ..., 'type': ...}]`` for the stores in GeoServer.

    :param store_type: optional store type filter, matched
        case-insensitively against the type reported by GeoServer;
        when None every store is listed.
    """
    cat = Catalog(ogc_server_settings.internal_rest, _user, _password)
    stores = cat.get_stores()
    store_list = []
    for store in stores:
        store.fetch()
        stype = store.dom.find('type').text.lower()
        # Single condition replaces the two duplicated append branches.
        if store_type is None or store_type.lower() == stype:
            store_list.append({'name': store.name, 'type': stype})
    return store_list
def set_attributes(layer, overwrite=False):
    """
    Retrieve layer attribute names & types from Geoserver,
    then store in GeoNode database using Attribute model

    The lookup strategy depends on the store type: ArcGIS REST (JSON),
    vector stores (WFS DescribeFeatureType, falling back to a WMS
    GetFeatureInfo scrape), or coverage stores (WCS DescribeCoverage).
    When *overwrite* is True, all existing Attribute rows for the layer
    are deleted and recreated.
    """
    attribute_map = []
    # Remote services are queried at their own base URL, not at our GeoServer.
    server_url = ogc_server_settings.LOCATION if layer.storeType != "remoteStore" else layer.service.base_url
    if layer.storeType == "remoteStore" and layer.service.ptype == "gxp_arcrestsource":
        dft_url = server_url + ("%s?f=json" % layer.typename)
        try:
            # The code below will fail if http_client cannot be imported
            body = json.loads(http_client.request(dft_url)[1])
            attribute_map = [[n["name"], _esri_types[n["type"]]]
                             for n in body["fields"] if n.get("name") and n.get("type")]
        except Exception:
            attribute_map = []
    elif layer.storeType in ["dataStore", "remoteStore", "wmsStore"]:
        dft_url = re.sub("\/wms\/?$",
                         "/",
                         server_url) + "wfs?" + urllib.urlencode({"service": "wfs",
                                                                  "version": "1.0.0",
                                                                  "request": "DescribeFeatureType",
                                                                  "typename": layer.typename.encode('utf-8'),
                                                                  })
        try:
            # The code below will fail if http_client cannot be imported or
            # WFS not supported
            body = http_client.request(dft_url)[1]
            doc = etree.fromstring(body)
            path = ".//{xsd}extension/{xsd}sequence/{xsd}element".format(
                xsd="{http://www.w3.org/2001/XMLSchema}")
            attribute_map = [[n.attrib["name"], n.attrib["type"]] for n in doc.findall(
                path) if n.attrib.get("name") and n.attrib.get("type")]
        except Exception:
            attribute_map = []
            # Try WMS instead
            dft_url = server_url + "?" + urllib.urlencode({
                "service": "wms",
                "version": "1.0.0",
                "request": "GetFeatureInfo",
                "bbox": ','.join([str(x) for x in layer.bbox]),
                "LAYERS": layer.typename.encode('utf-8'),
                "QUERY_LAYERS": layer.typename.encode('utf-8'),
                "feature_count": 1,
                "width": 1,
                "height": 1,
                "srs": "EPSG:4326",
                "info_format": "text/html",
                "x": 1,
                "y": 1
            })
            try:
                # Scrape the attribute names out of the HTML table headers;
                # the types are unknown in this path, so default to string.
                body = http_client.request(dft_url)[1]
                soup = BeautifulSoup(body)
                for field in soup.findAll('th'):
                    if(field.string is None):
                        field_name = field.contents[0].string
                    else:
                        field_name = field.string
                    attribute_map.append([field_name, "xsd:string"])
            except Exception:
                attribute_map = []
    elif layer.storeType in ["coverageStore"]:
        dc_url = server_url + "wcs?" + urllib.urlencode({
            "service": "wcs",
            "version": "1.1.0",
            "request": "DescribeCoverage",
            "identifiers": layer.typename.encode('utf-8')
        })
        try:
            response, body = http_client.request(dc_url)
            doc = etree.fromstring(body)
            path = ".//{wcs}Axis/{wcs}AvailableKeys/{wcs}Key".format(
                wcs="{http://www.opengis.net/wcs/1.1.1}")
            attribute_map = [[n.text, "raster"] for n in doc.findall(path)]
        except Exception:
            attribute_map = []
    # we need 3 more items for description, attribute_label and display_order
    attribute_map_dict = {
        'field': 0,
        'ftype': 1,
        'description': 2,
        'label': 3,
        'display_order': 4,
    }
    for attribute in attribute_map:
        attribute.extend((None, None, 0))
    attributes = layer.attribute_set.all()
    # Delete existing attributes if they no longer exist in an updated layer
    for la in attributes:
        lafound = False
        for attribute in attribute_map:
            field, ftype, description, label, display_order = attribute
            if field == la.attribute:
                lafound = True
                # store description and attribute_label in attribute_map
                attribute[attribute_map_dict['description']] = la.description
                attribute[attribute_map_dict['label']] = la.attribute_label
                attribute[attribute_map_dict['display_order']] = la.display_order
        if overwrite or not lafound:
            logger.debug(
                "Going to delete [%s] for [%s]",
                la.attribute,
                layer.name.encode('utf-8'))
            la.delete()
    # Add new layer attributes if they don't already exist
    if attribute_map is not None:
        iter = len(Attribute.objects.filter(layer=layer)) + 1
        for attribute in attribute_map:
            field, ftype, description, label, display_order = attribute
            if field is not None:
                la, created = Attribute.objects.get_or_create(
                    layer=layer, attribute=field, attribute_type=ftype,
                    description=description, attribute_label=label,
                    display_order=display_order)
                if created:
                    if is_layer_attribute_aggregable(
                            layer.storeType,
                            field,
                            ftype):
                        logger.debug("Generating layer attribute statistics")
                        result = get_attribute_statistics(layer.name, field)
                        if result is not None:
                            la.count = result['Count']
                            la.min = result['Min']
                            la.max = result['Max']
                            la.average = result['Average']
                            la.median = result['Median']
                            la.stddev = result['StandardDeviation']
                            la.sum = result['Sum']
                            la.unique_values = result['unique_values']
                            la.last_stats_updated = datetime.datetime.now()
                    # Hide GML system attributes from the UI.
                    la.visible = ftype.find("gml:") != 0
                    la.display_order = iter
                    la.save()
                    iter += 1
                    logger.debug(
                        "Created [%s] attribute for [%s]",
                        field,
                        layer.name.encode('utf-8'))
    else:
        logger.debug("No attributes found")
def set_styles(layer, gs_catalog):
    """Mirror a layer's GeoServer styles onto the GeoNode layer object.

    The default style is persisted first, then every alternate style;
    the combined set is assigned to ``layer.styles``.  The (unsaved)
    layer is returned to the caller.
    """
    gs_layer = gs_catalog.get_layer(layer.name)
    layer.default_style = save_style(gs_layer.default_style)
    # FIXME: This should remove styles that are no longer valid
    collected = [layer.default_style]
    collected.extend(save_style(alt) for alt in gs_layer.styles)
    layer.styles = collected
    return layer
def save_style(gs_style):
    """Persist a gsconfig style as a GeoNode ``Style`` record and return it."""
    gn_style, _created = Style.objects.get_or_create(name=gs_style.name)
    # Always refresh the stored SLD metadata from what GeoServer reports.
    gn_style.sld_title = gs_style.sld_title
    gn_style.sld_body = gs_style.sld_body
    gn_style.sld_url = gs_style.body_href
    gn_style.save()
    return gn_style
def is_layer_attribute_aggregable(store_type, field_name, field_type):
    """
    Decipher whether layer attribute is suitable for statistical derivation
    """
    # Statistics only make sense for numeric, non-identifier fields of
    # vector (dataStore) layers.
    return (store_type == 'dataStore' and
            field_type in LAYER_ATTRIBUTE_NUMERIC_DATA_TYPES and
            field_name.lower() not in ('id', 'identifier'))
def get_attribute_statistics(layer_name, field):
    """
    Generate statistics (range, mean, median, standard deviation, unique values)
    for layer attribute

    Returns the statistics dict, or None when WPS is disabled or the
    WPS request fails.
    """
    logger.debug('Deriving aggregate statistics for attribute %s', field)
    # Without a WPS endpoint there is nothing we can compute.
    if not ogc_server_settings.WPS_ENABLED:
        return None
    try:
        return wps_execute_layer_attribute_statistics(layer_name, field)
    except Exception:
        # Best effort: log the failure and report "no statistics".
        logger.exception('Error generating layer aggregate statistics')
        return None
def get_wcs_record(instance, retry=True):
    """Look up *instance* in the WCS capabilities document.

    GeoServer can take a moment before a fresh coverage shows up, so a
    single delayed retry is attempted before raising GeoNodeException.
    """
    wcs = WebCoverageService(ogc_server_settings.LOCATION + 'wcs', '1.0.0')
    key = instance.workspace + ':' + instance.name
    logger.debug(wcs.contents)
    if key in wcs.contents:
        return wcs.contents[key]
    msg = ("Layer '%s' was not found in WCS service at %s." %
           (key, ogc_server_settings.public_url)
           )
    if not retry:
        raise GeoNodeException(msg)
    logger.debug(
        msg +
        ' Waiting a couple of seconds before trying again.')
    time.sleep(2)
    return get_wcs_record(instance, retry=False)
def get_coverage_grid_extent(instance):
    """
    Returns a list of integers with the size of the coverage
    extent in pixels
    """
    grid = get_wcs_record(instance).grid
    sizes = []
    # Pixel count per axis is (high - low + 1), both limits inclusive.
    for high, low in zip(grid.highlimits, grid.lowlimits):
        sizes.append(int(high) - int(low) + 1)
    return sizes
# Map GeoNode's layer kind (as returned by layer_type()) to the
# corresponding gsconfig resource type.
GEOSERVER_LAYER_TYPES = {
    'vector': FeatureType.resource_type,
    'raster': Coverage.resource_type,
}
def geoserver_layer_type(filename):
    """Return the gsconfig resource type for *filename*.

    Propagates a ``KeyError`` if ``layer_type`` reports a kind that is
    not in ``GEOSERVER_LAYER_TYPES``.
    """
    return GEOSERVER_LAYER_TYPES[layer_type(filename)]
def cleanup(name, uuid):
    """Deletes GeoServer and Catalogue records for a given name.
    Useful to clean the mess when something goes terribly wrong.
    It also verifies if the Django record existed, in which case
    it performs no action.
    """
    try:
        Layer.objects.get(name=name)
    except Layer.DoesNotExist:
        pass
    else:
        msg = ('Not doing any cleanup because the layer %s exists in the '
               'Django db.' % name)
        raise GeoNodeException(msg)
    cat = gs_catalog
    gs_store = None
    gs_layer = None
    gs_resource = None
    # FIXME: Could this lead to someone deleting for example a postgis db
    # with the same name of the uploaded file?.
    try:
        gs_store = cat.get_store(name)
        if gs_store is not None:
            gs_layer = cat.get_layer(name)
            if gs_layer is not None:
                gs_resource = gs_layer.resource
        else:
            gs_layer = None
            gs_resource = None
    except FailedRequestError as e:
        msg = ('Couldn\'t connect to GeoServer while cleaning up layer '
               '[%s] !!', str(e))
        logger.warning(msg)
    # Each deletion below is best-effort: the exceptions are narrowed from
    # bare `except:` so KeyboardInterrupt/SystemExit are no longer swallowed.
    if gs_layer is not None:
        try:
            cat.delete(gs_layer)
        except Exception:
            logger.warning("Couldn't delete GeoServer layer during cleanup()")
    if gs_resource is not None:
        try:
            cat.delete(gs_resource)
        except Exception:
            msg = 'Couldn\'t delete GeoServer resource during cleanup()'
            logger.warning(msg)
    if gs_store is not None:
        try:
            cat.delete(gs_store)
        except Exception:
            logger.warning("Couldn't delete GeoServer store during cleanup()")
    logger.warning('Deleting dangling Catalogue record for [%s] '
                   '(no Django record to match)', name)
    if 'geonode.catalogue' in settings.INSTALLED_APPS:
        from geonode.catalogue import get_catalogue
        catalogue = get_catalogue()
        catalogue.remove_record(uuid)
    logger.warning('Finished cleanup after failed Catalogue/Django '
                   'import for layer: %s', name)
def _create_featurestore(name, data, overwrite=False, charset="UTF-8", workspace=None):
    """Create a GeoServer featurestore for *data*; return (store, resource)."""
    catalog = gs_catalog
    catalog.create_featurestore(name, data, overwrite=overwrite, charset=charset)
    store = get_store(catalog, name, workspace=workspace)
    resource = catalog.get_resource(name, store=store, workspace=workspace)
    return store, resource
def _create_coveragestore(name, data, overwrite=False, charset="UTF-8", workspace=None):
    """Create a GeoServer coveragestore for *data*; return (store, resource).

    NOTE: *charset* is accepted for signature parity with the other
    ``_create_*store`` helpers but is not forwarded to gsconfig.
    """
    catalog = gs_catalog
    catalog.create_coveragestore(name, data, overwrite=overwrite)
    store = get_store(catalog, name, workspace=workspace)
    resource = catalog.get_resource(name, store=store, workspace=workspace)
    return store, resource
def _create_db_featurestore(name, data, overwrite=False, charset="UTF-8", workspace=None):
    """Create a database store then use it to import a shapefile.
    If the import into the database fails then delete the store
    (and delete the PostGIS table for it).

    Returns a ``(datastore, resource)`` tuple; raises GeoNodeException
    when loading the data into PostGIS fails.
    """
    cat = gs_catalog
    dsname = ogc_server_settings.DATASTORE
    try:
        ds = get_store(cat, dsname, workspace=workspace)
    except FailedRequestError:
        # The datastore does not exist yet: create it and configure its
        # connection from the Django DATABASES entry referenced by
        # OGC_SERVER, then re-fetch it so `ds` is a fully-populated object.
        ds = cat.create_datastore(dsname, workspace=workspace)
        db = ogc_server_settings.datastore_db
        db_engine = 'postgis' if \
            'postgis' in db['ENGINE'] else db['ENGINE']
        ds.connection_parameters.update(
            {'validate connections': 'true',
             'max connections': '10',
             'min connections': '1',
             'fetch size': '1000',
             'host': db['HOST'],
             'port': db['PORT'],
             'database': db['NAME'],
             'user': db['USER'],
             'passwd': db['PASSWORD'],
             'dbtype': db_engine}
        )
        cat.save(ds)
        ds = get_store(cat, dsname, workspace=workspace)
    try:
        cat.add_data_to_store(ds, name, data,
                              overwrite=overwrite,
                              charset=charset)
        return ds, cat.get_resource(name, store=ds, workspace=workspace)
    except Exception:
        # Import failed: try to drop the half-created PostGIS table so a
        # retry with the same name can succeed.
        msg = _("An exception occurred loading data to PostGIS")
        msg += "- %s" % (sys.exc_info()[1])
        try:
            delete_from_postgis(name)
        except Exception:
            msg += _(" Additionally an error occured during database cleanup")
            msg += "- %s" % (sys.exc_info()[1])
        raise GeoNodeException(msg)
def get_store(cat, name, workspace=None):
    """Fetch the store *name* from catalog *cat* and resolve its concrete type.

    *workspace* may be a Workspace object or a workspace name; when it is
    missing or unknown the catalog's default workspace is used.  Raises
    ``FailedRequestError`` when no data/coverage/wms store matches.
    """
    # Make sure workspace is a workspace object and not a string.
    # If the workspace does not exist, continue as if no workspace had been defined.
    if isinstance(workspace, basestring):
        workspace = cat.get_workspace(workspace)
    if workspace is None:
        workspace = cat.get_default_workspace()
    # Probe the three store flavours in order; the first hit wins.
    store = None
    for url_attr in ('datastore_url', 'coveragestore_url', 'wmsstore_url'):
        base_url = getattr(workspace, url_attr)[:-4]
        try:
            store = cat.get_xml('%s/%s.xml' % (base_url, name))
            break
        except FailedRequestError:
            continue
    if store is None:
        raise FailedRequestError("No store found named: " + name)
    # Wrap the raw XML index entry in the matching gsconfig store object.
    if store.tag == 'dataStore':
        store = datastore_from_index(cat, workspace, store)
    elif store.tag == 'coverageStore':
        store = coveragestore_from_index(cat, workspace, store)
    elif store.tag == 'wmsStore':
        store = wmsstore_from_index(cat, workspace, store)
    return store
def geoserver_upload(
        layer,
        base_file,
        user,
        name,
        overwrite=True,
        title=None,
        abstract=None,
        permissions=None,
        keywords=(),
        charset='UTF-8'):
    """Upload *base_file* to GeoServer and return the metadata needed to
    create the matching Django Layer record.

    Returns a ``(name, workspace_name, defaults_dict, gs_resource)``
    tuple.  Raises ``GeoNodeException`` when the upload conflicts with
    an existing resource of a different type, when GeoServer cannot
    produce a valid resource, or when no valid projection can be
    determined.
    """
    # Step 2. Check that it is uploading to the same resource type as
    # the existing resource
    logger.info('>>> Step 2. Make sure we are not trying to overwrite a '
                'existing resource named [%s] with the wrong type', name)
    the_layer_type = geoserver_layer_type(base_file)
    # Get a short handle to the gsconfig geoserver catalog
    cat = gs_catalog
    # Fix bug on layer replace #2642
    # https://github.com/GeoNode/geonode/issues/2462
    cat.reload()
    workspace = cat.get_default_workspace()
    # Check if the store exists in geoserver
    try:
        store = get_store(cat, name, workspace=workspace)
    except geoserver.catalog.FailedRequestError as e:
        # There is no store, ergo the road is clear
        pass
    else:
        # If we get a store, we do the following:
        resources = store.get_resources()
        # If the store is empty, we just delete it.
        if len(resources) == 0:
            cat.delete(store)
        else:
            # If our resource is already configured in the store it needs
            # to have the right resource type
            for resource in resources:
                if resource.name == name:
                    msg = 'Name already in use and overwrite is False'
                    assert overwrite, msg
                    existing_type = resource.resource_type
                    if existing_type != the_layer_type:
                        msg = ('Type of uploaded file %s (%s) '
                               'does not match type of existing '
                               'resource type '
                               '%s' % (name, the_layer_type, existing_type))
                        logger.info(msg)
                        raise GeoNodeException(msg)
    # Step 3. Identify whether it is vector or raster and which extra files
    # are needed.
    logger.info('>>> Step 3. Identifying if [%s] is vector or raster and '
                'gathering extra files', name)
    if the_layer_type == FeatureType.resource_type:
        logger.debug('Uploading vector layer: [%s]', base_file)
        if ogc_server_settings.DATASTORE:
            create_store_and_resource = _create_db_featurestore
        else:
            create_store_and_resource = _create_featurestore
    elif the_layer_type == Coverage.resource_type:
        logger.debug("Uploading raster layer: [%s]", base_file)
        create_store_and_resource = _create_coveragestore
    else:
        msg = ('The layer type for name %s is %s. It should be '
               '%s or %s,' % (name,
                              the_layer_type,
                              FeatureType.resource_type,
                              Coverage.resource_type))
        logger.warn(msg)
        raise GeoNodeException(msg)
    # Step 4. Create the store in GeoServer
    logger.info('>>> Step 4. Starting upload of [%s] to GeoServer...', name)
    # Get the helper files if they exist
    files = get_files(base_file)
    data = files
    # Shapefiles are uploaded with their sidecar files; everything else is
    # sent as the single base file.
    if 'shp' not in files:
        data = base_file
    try:
        store, gs_resource = create_store_and_resource(name,
                                                       data,
                                                       charset=charset,
                                                       overwrite=overwrite,
                                                       workspace=workspace)
    except UploadError as e:
        msg = ('Could not save the layer %s, there was an upload '
               'error: %s' % (name, str(e)))
        logger.warn(msg)
        e.args = (msg,)
        raise
    except ConflictingDataError as e:
        # A datastore of this name already exists
        msg = ('GeoServer reported a conflict creating a store with name %s: '
               '"%s". This should never happen because a brand new name '
               'should have been generated. But since it happened, '
               'try renaming the file or deleting the store in '
               'GeoServer.' % (name, str(e)))
        logger.warn(msg)
        e.args = (msg,)
        raise
    else:
        logger.debug('Finished upload of [%s] to GeoServer without '
                     'errors.', name)
    # Step 5. Create the resource in GeoServer
    logger.info('>>> Step 5. Generating the metadata for [%s] after '
                'successful import to GeoSever', name)
    # Verify the resource was created
    if gs_resource is not None:
        assert gs_resource.name == name
    else:
        msg = ('GeoNode encountered problems when creating layer %s.'
               'It cannot find the Layer that matches this Workspace.'
               'try renaming your files.' % name)
        logger.warn(msg)
        raise GeoNodeException(msg)
    # Step 6. Make sure our data always has a valid projection
    # FIXME: Put this in gsconfig.py
    logger.info('>>> Step 6. Making sure [%s] has a valid projection' % name)
    if gs_resource.latlon_bbox is None:
        box = gs_resource.native_bbox[:4]
        minx, maxx, miny, maxy = [float(a) for a in box]
        if -180 <= minx <= 180 and -180 <= maxx <= 180 and \
           -90 <= miny <= 90 and -90 <= maxy <= 90:
            logger.info('GeoServer failed to detect the projection for layer '
                        '[%s]. Guessing EPSG:4326', name)
            # If GeoServer couldn't figure out the projection, we just
            # assume it's lat/lon to avoid a bad GeoServer configuration
            gs_resource.latlon_bbox = gs_resource.native_bbox
            gs_resource.projection = "EPSG:4326"
            cat.save(gs_resource)
        else:
            msg = ('GeoServer failed to detect the projection for layer '
                   '[%s]. It doesn\'t look like EPSG:4326, so backing out '
                   'the layer.')
            logger.info(msg, name)
            cascading_delete(cat, name)
            raise GeoNodeException(msg % name)
    # Step 7. Create the style and assign it to the created resource
    # FIXME: Put this in gsconfig.py
    logger.info('>>> Step 7. Creating style for [%s]' % name)
    publishing = cat.get_layer(name)
    # Prefer an uploaded .sld sidecar file; otherwise derive a default one.
    if 'sld' in files:
        f = open(files['sld'], 'r')
        sld = f.read()
        f.close()
    else:
        sld = get_sld_for(publishing)
    if sld is not None:
        try:
            cat.create_style(name, sld)
        except geoserver.catalog.ConflictingDataError as e:
            msg = ('There was already a style named %s in GeoServer, '
                   'cannot overwrite: "%s"' % (name, str(e)))
            logger.warn(msg)
            e.args = (msg,)
        # FIXME: Should we use the fully qualified typename?
        publishing.default_style = cat.get_style(name)
        cat.save(publishing)
    # Step 10. Create the Django record for the layer
    logger.info('>>> Step 10. Creating Django record for [%s]', name)
    # FIXME: Do this inside the layer object
    typename = workspace.name + ':' + gs_resource.name
    layer_uuid = str(uuid.uuid1())
    defaults = dict(store=gs_resource.store.name,
                    storeType=gs_resource.store.resource_type,
                    typename=typename,
                    title=title or gs_resource.title,
                    uuid=layer_uuid,
                    abstract=abstract or gs_resource.abstract or '',
                    owner=user)
    return name, workspace.name, defaults, gs_resource
class ServerDoesNotExist(Exception):
    """Raised when an OGC server alias is requested that is not configured."""
    pass
class OGC_Server(object):
    """
    OGC Server object.

    Wraps one OGC_SERVER settings dict; plain settings keys (LOCATION,
    USER, PASSWORD, ...) are exposed as attributes via ``__getattr__``.
    """
    def __init__(self, ogc_server, alias):
        self.alias = alias
        self.server = ogc_server
    def __getattr__(self, item):
        # Fall through to the underlying settings dict: unknown attributes
        # resolve to the dict entry, or None when the key is absent.
        return self.server.get(item)
    @property
    def credentials(self):
        """
        Returns a tuple of the server's credentials.
        """
        creds = namedtuple('OGC_SERVER_CREDENTIALS', ['username', 'password'])
        return creds(username=self.USER, password=self.PASSWORD)
    @property
    def datastore_db(self):
        """
        Returns the server's datastore dict, or an empty dict when no
        DATASTORE is configured (or it has no DATABASES entry).
        """
        if self.DATASTORE and settings.DATABASES.get(self.DATASTORE, None):
            return settings.DATABASES.get(self.DATASTORE, dict())
        else:
            return dict()
    @property
    def ows(self):
        """
        The Open Web Service url for the server.
        """
        location = self.PUBLIC_LOCATION if self.PUBLIC_LOCATION else self.LOCATION
        return self.OWS_LOCATION if self.OWS_LOCATION else location + 'ows'
    @property
    def rest(self):
        """
        The REST endpoint for the server.
        """
        return self.LOCATION + \
            'rest' if not self.REST_LOCATION else self.REST_LOCATION
    @property
    def public_url(self):
        """
        The global public endpoint for the server.
        """
        return self.LOCATION if not self.PUBLIC_LOCATION else self.PUBLIC_LOCATION
    @property
    def internal_ows(self):
        """
        The Open Web Service url for the server used by GeoNode internally.
        """
        location = self.LOCATION
        return location + 'ows'
    @property
    def internal_rest(self):
        """
        The internal REST endpoint for the server.
        """
        return self.LOCATION + 'rest'
    @property
    def hostname(self):
        """Hostname component of LOCATION."""
        return urlsplit(self.LOCATION).hostname
    @property
    def netloc(self):
        """Network-location (host[:port]) component of LOCATION."""
        return urlsplit(self.LOCATION).netloc
    def __str__(self):
        return self.alias
class OGC_Servers_Handler(object):
    """
    OGC Server Settings Convenience dict.

    Lazily builds and caches one validated OGC_Server per alias,
    per thread (via ``threading.local``).
    """
    def __init__(self, ogc_server_dict):
        self.servers = ogc_server_dict
        # FIXME(Ariel): Are there better ways to do this without involving
        # local?
        self._servers = local()
    def ensure_valid_configuration(self, alias):
        """
        Ensures the settings are valid.

        Raises ServerDoesNotExist for an unknown alias and
        ImproperlyConfigured for inconsistent DATASTORE/UPLOADER settings
        or the removed PRINTNG_ENABLED option.
        """
        try:
            server = self.servers[alias]
        except KeyError:
            raise ServerDoesNotExist("The server %s doesn't exist" % alias)
        datastore = server.get('DATASTORE')
        uploader_backend = getattr(
            settings,
            'UPLOADER',
            dict()).get(
            'BACKEND',
            'geonode.rest')
        if uploader_backend == 'geonode.importer' and datastore and not settings.DATABASES.get(
                datastore):
            raise ImproperlyConfigured(
                'The OGC_SERVER setting specifies a datastore '
                'but no connection parameters are present.')
        if uploader_backend == 'geonode.importer' and not datastore:
            raise ImproperlyConfigured(
                'The UPLOADER BACKEND is set to geonode.importer but no DATASTORE is specified.')
        if 'PRINTNG_ENABLED' in server:
            raise ImproperlyConfigured("The PRINTNG_ENABLED setting has been removed, use 'PRINT_NG_ENABLED' instead.")
    def ensure_defaults(self, alias):
        """
        Puts the defaults into the settings dictionary for a given connection where no settings is provided.
        """
        try:
            server = self.servers[alias]
        except KeyError:
            raise ServerDoesNotExist("The server %s doesn't exist" % alias)
        server.setdefault('BACKEND', 'geonode.geoserver')
        server.setdefault('LOCATION', 'http://localhost:8080/geoserver/')
        server.setdefault('USER', 'admin')
        server.setdefault('PASSWORD', 'geoserver')
        server.setdefault('DATASTORE', str())
        server.setdefault('GEOGIG_DATASTORE_DIR', str())
        for option in ['MAPFISH_PRINT_ENABLED', 'PRINT_NG_ENABLED', 'GEONODE_SECURITY_ENABLED',
                       'BACKEND_WRITE_ENABLED']:
            server.setdefault(option, True)
        for option in ['GEOGIG_ENABLED', 'WMST_ENABLED', 'WPS_ENABLED']:
            server.setdefault(option, False)
    def __getitem__(self, alias):
        # Serve the thread-local cached instance when available; otherwise
        # default, validate, wrap and cache it.
        if hasattr(self._servers, alias):
            return getattr(self._servers, alias)
        self.ensure_defaults(alias)
        self.ensure_valid_configuration(alias)
        server = self.servers[alias]
        server = OGC_Server(alias=alias, ogc_server=server)
        setattr(self._servers, alias, server)
        return server
    def __setitem__(self, key, value):
        setattr(self._servers, key, value)
    def __iter__(self):
        return iter(self.servers)
    def all(self):
        return [self[alias] for alias in self]
def get_wms():
    """Return an owslib WebMapService bound to the internal OWS endpoint."""
    wms_url = (ogc_server_settings.internal_ows +
               "?service=WMS&request=GetCapabilities&version=1.1.0")
    netloc = urlparse(wms_url).netloc
    http = httplib2.Http()
    http.add_credentials(_user, _password)
    # Pre-register the basic-auth handler so the request is authenticated
    # on the first round-trip instead of after a 401 challenge.
    auth = httplib2.BasicAuthentication(
        (_user, _password),
        netloc,
        wms_url,
        {},
        None,
        None,
        http
    )
    http.authorizations.append(auth)
    capabilities = http.request(wms_url)[1]
    return WebMapService(wms_url, xml=capabilities)
def wps_execute_layer_attribute_statistics(layer_name, field):
    """Derive aggregate statistics from WPS endpoint"""
    # generate statistics using WPS
    url = '%s/ows' % (ogc_server_settings.LOCATION)
    # TODO: use owslib.wps.WebProcessingService for WPS interaction
    # this requires GeoServer's WPS gs:Aggregate function to
    # return a proper wps:ExecuteResponse
    request = render_to_string('layers/wps_execute_gs_aggregate.xml', {
        'layer_name': 'geonode:%s' % layer_name,
        'field': field
    })
    response = http_post(
        url,
        request,
        timeout=ogc_server_settings.TIMEOUT,
        username=ogc_server_settings.credentials.username,
        password=ogc_server_settings.credentials.password)
    exml = etree.fromstring(response)
    # Extract each aggregate from the response, defaulting to 'NA' when
    # the element is missing.
    result = {}
    for stat in ('Min', 'Max', 'Average', 'Median', 'StandardDeviation', 'Sum'):
        node = exml.find(stat)
        result[stat] = node.text if node is not None else 'NA'
    count_node = exml.find('Count')
    result['Count'] = int(count_node.text) if count_node is not None else 0
    # Unique values are not computed yet.
    result['unique_values'] = 'NA'
    return result
# TODO: find way of figuring out threshold better
# NOTE: the commented-out block below looks incomplete — what is the purpose of the next lines?
# if result['Count'] < 10000:
# request = render_to_string('layers/wps_execute_gs_unique.xml', {
# 'layer_name': 'geonode:%s' % layer_name,
# 'field': field
# })
# response = http_post(
# url,
# request,
# timeout=ogc_server_settings.TIMEOUT,
# username=ogc_server_settings.credentials.username,
# password=ogc_server_settings.credentials.password)
# exml = etree.fromstring(response)
def style_update(request, url):
    """
    Sync style stuff from GS to GN.
    Ideally we should call this from a view straight from GXP, and we should use
    gsConfig, that at this time does not support styles updates. Before gsConfig
    is updated, for now we need to parse xml.
    In case of a DELETE, we need to query request.path to get the style name,
    and then remove it.
    In case of a POST or PUT, we need to parse the xml from
    request.body, which is in this format:
    """
    if request.method in ('POST', 'PUT'):  # we need to parse xml
        # Need to remove NSx from IE11
        if "HTTP_USER_AGENT" in request.META:
            if ('Trident/7.0' in request.META['HTTP_USER_AGENT'] and
                    'rv:11.0' in request.META['HTTP_USER_AGENT']):
                txml = re.sub(r'xmlns:NS[0-9]=""', '', request.body)
                txml = re.sub(r'NS[0-9]:', '', txml)
                request._body = txml
        tree = ET.ElementTree(ET.fromstring(request.body))
        # By SLD convention the first <Name> is the NamedLayer name and the
        # second is the UserStyle name.
        elm_namedlayer_name = tree.findall(
            './/{http://www.opengis.net/sld}Name')[0]
        elm_user_style_name = tree.findall(
            './/{http://www.opengis.net/sld}Name')[1]
        elm_user_style_title = tree.find(
            './/{http://www.opengis.net/sld}Title')
        # NOTE(review): an Element with no children is falsy, so this also
        # falls back to the style name when a childless <Title> is present.
        if not elm_user_style_title:
            elm_user_style_title = elm_user_style_name
        layer_name = elm_namedlayer_name.text
        style_name = elm_user_style_name.text
        sld_body = '<?xml version="1.0" encoding="UTF-8"?>%s' % request.body
        # add style in GN and associate it to layer
        if request.method == 'POST':
            style = Style(name=style_name, sld_body=sld_body, sld_url=url)
            style.save()
            layer = Layer.objects.all().filter(typename=layer_name)[0]
            style.layer_styles.add(layer)
            style.save()
        if request.method == 'PUT':  # update style in GN
            style = Style.objects.all().filter(name=style_name)[0]
            style.sld_body = sld_body
            style.sld_url = url
            if len(elm_user_style_title.text) > 0:
                style.sld_title = elm_user_style_title.text
            style.save()
            # Re-save dependent layers so anything derived from the style
            # is refreshed.
            for layer in style.layer_styles.all():
                layer.save()
    if request.method == 'DELETE':  # delete style from GN
        style_name = os.path.basename(request.path)
        style = Style.objects.all().filter(name=style_name)[0]
        style.delete()
def set_time_info(layer, attribute, end_attribute, presentation,
                  precision_value, precision_step, enabled=True):
    '''Configure the time dimension for a layer.

    :param layer: the layer to configure
    :param attribute: the attribute used to represent the instant or period
       start
    :param end_attribute: the optional attribute used to represent the end
       period
    :param presentation: either 'LIST', 'DISCRETE_INTERVAL', or
       'CONTINUOUS_INTERVAL'
    :param precision_value: number representing number of steps
    :param precision_step: one of 'seconds', 'minutes', 'hours', 'days',
       'months', 'years'
    :param enabled: defaults to True
    :raises ValueError: if the layer is unknown to the geoserver catalog
    '''
    # Keep the caller's layer name before rebinding ``layer`` to the catalog
    # object: previously the error message read ``layer.name`` after
    # ``layer`` had become None, raising AttributeError instead of the
    # intended ValueError.
    layer_name = layer.name
    layer = gs_catalog.get_layer(layer_name)
    if layer is None:
        raise ValueError('no such layer: %s' % layer_name)
    resource = layer.resource
    resolution = None
    if precision_value and precision_step:
        resolution = '%s %s' % (precision_value, precision_step)
    info = DimensionInfo("time", enabled, presentation, resolution, "ISO8601",
                         None, attribute=attribute, end_attribute=end_attribute)
    # Merge into the existing resource metadata, then persist via the
    # catalog.
    metadata = dict(resource.metadata or {})
    metadata['time'] = info
    resource.metadata = metadata
    gs_catalog.save(resource)
def get_time_info(layer):
    '''Get the configured time dimension metadata for the layer as a dict.

    The keys of the dict will be those of the parameters of `set_time_info`.

    :returns: dict of values or None if not configured
    :raises ValueError: if the layer is unknown to the geoserver catalog
    '''
    # Capture the name first: ``layer`` is rebound to the catalog object and
    # the previous error message dereferenced ``layer.name`` when ``layer``
    # was None, raising AttributeError instead of ValueError.
    layer_name = layer.name
    layer = gs_catalog.get_layer(layer_name)
    if layer is None:
        raise ValueError('no such layer: %s' % layer_name)
    resource = layer.resource
    info = resource.metadata.get('time', None) if resource.metadata else None
    vals = None
    if info:
        value = step = None
        # resolution_str() yields e.g. "3 days"; split back into the value
        # and step used by set_time_info().
        resolution = info.resolution_str()
        if resolution:
            value, step = resolution.split()
        vals = dict(
            enabled=info.enabled,
            attribute=info.attribute,
            end_attribute=info.end_attribute,
            presentation=info.presentation,
            precision_value=value,
            precision_step=step,
        )
    return vals
ogc_server_settings = OGC_Servers_Handler(settings.OGC_SERVER)['default']

# Lazily-populated module-level caches for OWS service clients.
_wms = None
_csw = None
_user, _password = ogc_server_settings.credentials

# Shared HTTP client, pre-authenticated against the OGC server so requests
# do not need a 401 round-trip.  (A duplicated add_credentials() call was
# removed here; registering the same credentials twice was redundant.)
http_client = httplib2.Http()
http_client.add_credentials(_user, _password)
_netloc = urlparse(ogc_server_settings.LOCATION).netloc
http_client.authorizations.append(
    httplib2.BasicAuthentication(
        (_user, _password),
        _netloc,
        ogc_server_settings.LOCATION,
        {},
        None,
        None,
        http_client
    )
)

url = ogc_server_settings.rest
gs_catalog = Catalog(url, _user, _password)
gs_uploader = Client(url, _user, _password)

_punc = re.compile(r"[\.:]")  # regex for punctuation that confuses restconfig

# Rotating palettes/marks cycled through when building default styles.
_foregrounds = [
    "#ffbbbb",
    "#bbffbb",
    "#bbbbff",
    "#ffffbb",
    "#bbffff",
    "#ffbbff"]
_backgrounds = [
    "#880000",
    "#008800",
    "#000088",
    "#888800",
    "#008888",
    "#880088"]
_marks = ["square", "circle", "cross", "x", "triangle"]
_style_contexts = izip(cycle(_foregrounds), cycle(_backgrounds), cycle(_marks))
_default_style_names = ["point", "line", "polygon", "raster"]

# Mapping from ESRI field types to the XSD types used when mirroring
# ArcGIS layers.
_esri_types = {
    "esriFieldTypeDouble": "xsd:double",
    "esriFieldTypeString": "xsd:string",
    "esriFieldTypeSmallInteger": "xsd:int",
    "esriFieldTypeInteger": "xsd:int",
    "esriFieldTypeDate": "xsd:dateTime",
    "esriFieldTypeOID": "xsd:long",
    "esriFieldTypeGeometry": "xsd:geometry",
    "esriFieldTypeBlob": "xsd:base64Binary",
    "esriFieldTypeRaster": "raster",
    "esriFieldTypeGUID": "xsd:string",
    "esriFieldTypeGlobalID": "xsd:string",
    "esriFieldTypeXML": "xsd:anyType"}
def _render_thumbnail(req_body):
    """POST a rendering spec to GeoServer's printng service.

    Returns the PNG bytes, or None when the HTTP request fails.
    """
    spec = _fixup_ows_url(req_body)
    url = "%srest/printng/render.png" % ogc_server_settings.LOCATION
    hostname = urlparse(settings.SITEURL).hostname
    params = dict(width=240, height=180, auth="%s,%s,%s" % (hostname, _user, _password))
    url = url + "?" + urllib.urlencode(params)
    # @todo annoying but not critical
    # openlayers controls posted back contain a bad character. this seems
    # to come from a minus entity in the html, but it gets converted
    # to a unicode en-dash but is not uncoded properly during transmission
    # 'ignore' the error for now as controls are not being rendered...
    data = spec
    if type(data) == unicode:
        # make sure any stored bad values are wiped out
        # don't use keyword for errors - 2.6 compat
        # though unicode accepts them (as seen below)
        data = data.encode('ASCII', 'ignore')
        data = unicode(data, errors='ignore').encode('UTF-8')
    try:
        resp, content = http_client.request(url, "POST", data, {
            'Content-type': 'text/html'
        })
    except Exception:
        # Best-effort: a missing thumbnail is not fatal, just log it.
        logging.warning('Error generating thumbnail')
        return
    return content
def _fixup_ows_url(thumb_spec):
    """Rewrite public OGC-server URLs in *thumb_spec* to the internal one.

    @HACK - for whatever reason, a map's maplayers ows_url contains only
    /geoserver/wms so rendering of thumbnails fails - replace those uri's
    with the full geoserver URL.
    """
    gspath = '"' + ogc_server_settings.public_url  # this should be in img src attributes
    repl = '"' + ogc_server_settings.LOCATION
    # Plain string replacement: the previous re.sub() treated the URL as a
    # regular expression, so '.' and other metacharacters in the public URL
    # could match (and rewrite) unintended text.
    return thumb_spec.replace(gspath, repl)
|
import os
# Base URL prefix used to rebuild YouTube links from youtube-dl file names.
# (Was an unquoted URL, which is a syntax error.)
yt = "https://youtu.be/"
mp3list = '/home/steakwipe/git/ytdl-namer'  # i'd like this to be a runtime option later


def mp3gen():
    """Yield the path of every .mp3 file under the current directory."""
    for root, dirs, files in os.walk('.'):
        for filename in files:
            if os.path.splitext(filename)[1] == ".mp3":
                yield os.path.join(root, filename)


for mp3file in mp3gen():
    # youtube-dl appends the 11-character video id to the base name, so the
    # last 11 characters of the stem rebuild the video URL.  The original
    # split the literal string 'mp3file', left trailing commas that turned
    # every value into a tuple, and sliced the None returned by print().
    fn = os.path.splitext(os.path.basename(mp3file))[0]
    url = fn[-11:]
    print(yt + url)
|
import gzip
import os
import pickle

import matplotlib
# The backend must be chosen *before* pyplot is imported; the original
# called matplotlib.use() after importing pyplot, where it historically has
# no effect, and used the non-canonical spelling 'TKagg'.
matplotlib.use('TkAgg')
import matplotlib.pyplot as plt
import numpy as np


def show_attention():
    """Load saved attention weights and display their mean as a heat map."""
    # Load attentions
    print('Loading attentions to pickle file')
    with gzip.open(
            os.path.join('training_results', 'torch_train', 'attentions.pkl.gz'),
            'r') as att_file:
        attentions = pickle.load(att_file)
    # Set up figure with colorbar.  Averages over the first two axes of the
    # stacked attentions before plotting (presumably sample/step axes —
    # TODO confirm against the producer of attentions.pkl.gz).
    fig = plt.figure()
    ax = fig.add_subplot(111)
    cax = ax.matshow(np.mean(np.array(attentions), axis=(0, 1)), cmap='bone')
    fig.colorbar(cax)
    # # Set up axes
    # ax.set_xticklabels([''] + input_sentence.split(' ') +
    #                    ['<EOS>'], rotation=90)
    # ax.set_yticklabels([''] + output_words)
    #
    # # Show label at every tick
    # ax.xaxis.set_major_locator(ticker.MultipleLocator(1))
    # ax.yaxis.set_major_locator(ticker.MultipleLocator(1))
    plt.show()


show_attention()
|
import datetime
from kivy.app import App
from kivy.uix.widget import Widget
import random
from kivy.clock import Clock
from kivy.properties import StringProperty, NumericProperty
from webScrape import webScraper
class MirrorWindow(Widget):
    """Root widget of the mirror UI.

    Exposes clock/date/weather strings and animation angles as Kivy
    properties so the kv layout can bind to them; ``update`` is driven by a
    Clock schedule.
    """

    # Swedish weekday abbreviations, indexed by strftime('%w') (0 = Sunday).
    dayPrint = ['Sön', 'Mån', 'Tis', 'Ons', 'Tors', 'Fre', 'Lör']
    secondsAnim = NumericProperty(0)
    minute = NumericProperty(0)
    time = StringProperty('')
    day = StringProperty('')
    date = StringProperty('')
    weather1 = StringProperty('')
    weather2 = StringProperty('')
    weather3 = StringProperty('')
    seconds = StringProperty('')

    def update(self, dt):
        """Clock callback: refresh displayed time/date strings and advance
        the second/minute animation angles (degrees, wrapping at 360)."""
        self.time = datetime.datetime.today().strftime("%H:%M")
        self.day = self.dayPrint[int(datetime.date.today().strftime('%w'))]
        self.date = datetime.date.today().strftime('%y%m%d')
        #self.seconds = str (( int (datetime.datetime.today().strftime('%f')) / 1000 ) )
        #self.seconds = ( int (datetime.datetime.today().strftime('%f')) / 1000 )
        self.seconds = str(datetime.datetime.today().strftime('%S'))
        # Weather scraping currently disabled:
        # self.weather1 = (' ').join(webScraper().weather()[0][:3])
        # self.weather2 = (' ').join(webScraper().weather()[1][:3])
        # self.weather3 = (' ').join(webScraper().weather()[2][:3])
        #60 000 000
        # Advance the seconds hand 6 degrees per tick.
        if self.secondsAnim < 360:
            self.secondsAnim = self.secondsAnim + 6
        else:
            self.secondsAnim = 0
        #self.minute = int (datetime.datetime.today().strftime('%S') )
        if self.minute < 360:
            self.minute = self.minute + 0.1
        else:
            self.minute = 0.1
class MirrorApp(App):
    """Kivy application wrapper that hosts the MirrorWindow."""

    def build(self):
        # Schedule update() every 10 ms so the displayed values stay fresh.
        mirrorWindow = MirrorWindow()
        Clock.schedule_interval(mirrorWindow.update, 0.01)
        return mirrorWindow
if __name__ == '__main__':
    # Only launch the UI when executed as a script, not on import.
    MirrorApp().run()
|
from unittest import TestCase
from gnomon import MagneticField
class MockG4ThreeVector():
    """Minimal stand-in for a Geant4 G4ThreeVector: just x/y/z components."""
    x = 0
    y = 0
    z = 0
class TestWandsToroidField(TestCase):
    """Sanity checks for MagneticField.WandsToroidField in both polarities."""

    def setUp(self):
        # One field per polarity; most tests iterate over both.
        self.field_minus = MagneticField.WandsToroidField('-')
        self.field_plus = MagneticField.WandsToroidField('+')
        self.fields = [self.field_minus, self.field_plus]

    def test_PhenomModel(self):
        # The model only accepts a strictly positive radius.
        for field in self.fields:
            with self.assertRaises(ValueError):
                field.PhenomModel(0)
            with self.assertRaises(ValueError):
                field.PhenomModel(-1)
            field.PhenomModel(1)

    def test_GetFieldValue(self):
        for field in self.fields:
            # At the origin the field vanishes.
            pos = MockG4ThreeVector()
            vector = field.GetFieldValue(pos, 0)
            self.assertEqual(vector.x, 0)
            self.assertEqual(vector.y, 0)
            self.assertEqual(vector.z, 0)
            pos.x = 1
            vector = field.GetFieldValue(pos, 0)
        # Opposite polarities must produce opposite field vectors.
        pos = MockG4ThreeVector()
        pos.x = 1
        pos.y = 2
        pos.z = 3
        vector_plus = self.field_plus.GetFieldValue(pos, 0)
        vector_minus = self.field_minus.GetFieldValue(pos, 0)
        self.assertAlmostEqual(vector_plus.x, -1 * vector_minus.x)
        self.assertAlmostEqual(vector_plus.y, -1 * vector_minus.y)
        self.assertAlmostEqual(vector_plus.z, -1 * vector_minus.z)
|
import errno
import logging
import os
import re
from django.conf import settings
from pootle.core.log import STORE_RESURRECTED, store_log
from pootle.core.utils.timezone import datetime_min
from pootle_app.models.directory import Directory
from pootle_language.models import Language
from pootle_store.models import Store
from pootle_store.util import absolute_real_path, relative_real_path
# Matches a bare language code such as 'pt', 'pt_BR' or 'sr@latin'.
LANGCODE_RE = re.compile('^[a-z]{2,3}([_-][a-z]{2,3})?(@[a-z0-9]+)?$',
                         re.IGNORECASE)
# Matches a language code appended to a file stem, e.g. 'project-pt_BR'.
LANGCODE_POSTFIX_RE = re.compile(
    '^.*?[-_.]([a-z]{2,3}([_-][a-z]{2,3})?(@[a-z0-9]+)?)$', re.IGNORECASE)
def direct_language_match_filename(language_code, path_name):
    """Return True when *path_name*'s stem names *language_code*, either
    directly or via a recognised language-code postfix, and is not the code
    of some other known language."""
    stem = os.path.splitext(os.path.basename(path_name))[0]
    if stem == language_code or stem.lower() == language_code.lower():
        return True
    # Check file doesn't match another language.
    if Language.objects.filter(code__iexact=stem).count():
        return False
    pieces = LANGCODE_POSTFIX_RE.split(stem)
    if len(pieces) <= 1:
        return False
    candidate = pieces[1]
    return (candidate == language_code or
            candidate.lower() == language_code.lower())
def match_template_filename(project, filename):
    """Test if :param:`filename` might point at a template file for a given
    :param:`project`.
    """
    name, ext = os.path.splitext(os.path.basename(filename))
    # FIXME: is the test for matching extension redundant?
    if ext == os.path.extsep + project.get_template_filetype():
        if ext != os.path.extsep + project.localfiletype:
            # Template extension is distinct, surely file is a template.
            return True
        elif not find_lang_postfix(filename):
            # File name can't possibly match any language, assume it is a
            # template.
            return True
    return False
def get_matching_language_dirs(project_dir, language):
    """Return the entries of *project_dir* whose name equals
    ``language.code`` (at most one on a sane filesystem)."""
    matches = []
    for entry in os.listdir(project_dir):
        if entry == language.code:
            matches.append(entry)
    return matches
def get_non_existant_language_dir(project_dir, language, file_style,
                                  make_dirs):
    """Resolve the directory for a language that has no directory yet.

    GNU-style projects keep all files in the project root; otherwise the
    language directory is created when *make_dirs* is set, and an
    IndexError is raised when it is not.
    """
    if file_style == "gnu":
        return project_dir
    if not make_dirs:
        raise IndexError("Directory not found for language %s, project %s" %
                         (language.code, project_dir))
    language_dir = os.path.join(project_dir, language.code)
    os.mkdir(language_dir)
    return language_dir
def get_or_make_language_dir(project_dir, language, file_style, make_dirs):
    """Return the directory matching *language* inside *project_dir*,
    falling back to GNU layout / directory creation when none matches."""
    matching = get_matching_language_dirs(project_dir, language)
    if matching:
        return os.path.join(project_dir, matching[0])
    # If no matching directories can be found, check if it is a GNU-style
    # project.
    return get_non_existant_language_dir(project_dir, language, file_style,
                                         make_dirs)
def get_language_dir(project_dir, language, file_style, make_dirs):
    """Return the on-disk directory for *language*, delegating to
    ``get_or_make_language_dir`` when it does not exist yet."""
    language_dir = os.path.join(project_dir, language.code)
    if os.path.exists(language_dir):
        return language_dir
    return get_or_make_language_dir(project_dir, language, file_style,
                                    make_dirs)
def get_translation_project_dir(language, project_dir, file_style,
                                make_dirs=False):
    """Returns the base directory containing translations files for the
    project.

    :param make_dirs: if ``True``, project and language directories will be
        created as necessary.
    """
    return (project_dir if file_style == 'gnu'
            else get_language_dir(project_dir, language, file_style,
                                  make_dirs))
def is_hidden_file(path):
    """Return True when *path*'s name starts with a dot (Unix hidden file).

    Uses ``startswith`` instead of indexing so an empty name is simply not
    hidden rather than raising ``IndexError``.
    """
    return path.startswith('.')
def split_files_and_dirs(ignored_files, ext, real_dir, file_filter):
    """Partition the children of *real_dir* into (files, dirs).

    Ignored and hidden entries are skipped; only plain files with the
    wanted extension that pass *file_filter* are kept.
    """
    files = []
    dirs = []
    for child in os.listdir(real_dir):
        if child in ignored_files or is_hidden_file(child):
            continue
        full_path = os.path.join(real_dir, child)
        if os.path.isdir(full_path):
            dirs.append(child)
        elif (os.path.isfile(full_path) and full_path.endswith(ext) and
                file_filter(full_path)):
            files.append(child)
    return files, dirs
def add_items(fs_items_set, db_items, create_or_resurrect_db_item, parent):
    """Add/make obsolete the database items to correspond to the filesystem.

    :param fs_items_set: items (dirs, files) currently in the filesystem
    :param db_items: dict (name, item) of items (dirs, stores) currently in
        the database
    :create_or_resurrect_db_item: callable that will create a new db item
        or resurrect an obsolete db item with a given name and parent.
    :parent: parent db directory for the items
    :return: list of all items, list of newly added items
    :rtype: tuple
    """
    items = []
    new_items = []
    db_items_set = set(db_items)
    # DB entries with no filesystem counterpart become obsolete; filesystem
    # entries with no DB counterpart are created (or resurrected).
    items_to_delete = db_items_set - fs_items_set
    items_to_create = fs_items_set - db_items_set
    for name in items_to_delete:
        db_items[name].makeobsolete()
    if len(items_to_delete) > 0:
        # Obsoleting children invalidates cached data on the parent and on
        # every virtual folder that includes it.
        parent.update_all_cache()
        for vfolder_treeitem in parent.vfolder_treeitems:
            vfolder_treeitem.update_all_cache()
    for name in db_items_set - items_to_delete:
        items.append(db_items[name])
    for name in items_to_create:
        item = create_or_resurrect_db_item(name)
        items.append(item)
        new_items.append(item)
        try:
            item.save()
        except Exception:
            # Keep syncing the remaining items even if one of them fails.
            logging.exception('Error while adding %s', item)
    return items, new_items
def create_or_resurrect_store(file, parent, name, translation_project):
    """Create or resurrect a store db item with given name and parent."""
    try:
        store = Store.objects.get(parent=parent, name=name)
        store.obsolete = False
        # Reset the sync timestamp so the file on disk gets re-read.
        store.file_mtime = datetime_min
        if store.last_sync_revision is None:
            store.last_sync_revision = store.get_max_unit_revision()
        store_log(user='system', action=STORE_RESURRECTED,
                  path=store.pootle_path, store=store.id)
    except Store.DoesNotExist:
        store = Store(file=file, parent=parent,
                      name=name, translation_project=translation_project)
    store.mark_all_dirty()
    return store
def create_or_resurrect_dir(name, parent):
    """Create or resurrect a directory db item with given name and parent."""
    try:
        dir = Directory.objects.get(parent=parent, name=name)
        dir.obsolete = False
    except Directory.DoesNotExist:
        dir = Directory(name=name, parent=parent)
    dir.mark_all_dirty()
    return dir
def add_files(translation_project, ignored_files, ext, relative_dir, db_dir,
              file_filter=lambda _x: True):
    """Recursively mirror the on-disk tree at *relative_dir* into *db_dir*.

    :return: (all stores, newly created stores, whether the subtree ended
        up empty) for the subtree rooted at *relative_dir*.
    """
    podir_path = to_podir_path(relative_dir)
    files, dirs = split_files_and_dirs(ignored_files, ext, podir_path,
                                       file_filter)
    file_set = set(files)
    dir_set = set(dirs)
    # Snapshot of the live DB children, keyed by name, for the diff done in
    # add_items().
    existing_stores = dict((store.name, store) for store in
                           db_dir.child_stores.live().exclude(file='')
                                                     .iterator())
    existing_dirs = dict((dir.name, dir) for dir in
                         db_dir.child_dirs.live().iterator())
    files, new_files = add_items(
        file_set,
        existing_stores,
        lambda name: create_or_resurrect_store(
            file=os.path.join(relative_dir, name),
            parent=db_dir,
            name=name,
            translation_project=translation_project,
        ),
        db_dir,
    )
    db_subdirs, new_db_subdirs = add_items(
        dir_set,
        existing_dirs,
        lambda name: create_or_resurrect_dir(name=name, parent=db_dir),
        db_dir,
    )
    is_empty = len(files) == 0
    for db_subdir in db_subdirs:
        fs_subdir = os.path.join(relative_dir, db_subdir.name)
        _files, _new_files, _is_empty = \
            add_files(translation_project, ignored_files, ext, fs_subdir,
                      db_subdir, file_filter)
        files += _files
        new_files += _new_files
        is_empty &= _is_empty
    if is_empty:
        # A directory with no stores anywhere below it is obsoleted too.
        db_dir.makeobsolete()
    return files, new_files, is_empty
def to_podir_path(path):
    """Map *path* into the configured translation directory."""
    return os.path.join(settings.POOTLE_TRANSLATION_DIRECTORY,
                        relative_real_path(path))
def find_lang_postfix(filename):
    """Finds the language code at end of a filename.

    Returns the bare stem when it is itself a language code, the postfix
    matched by ``LANGCODE_POSTFIX_RE``, a known ``Language`` code appended
    with '-', '_' or '.', or None when nothing matches.
    """
    name = os.path.splitext(os.path.basename(filename))[0]
    if LANGCODE_RE.match(name):
        return name
    match = LANGCODE_POSTFIX_RE.match(name)
    if match:
        return match.groups()[0]
    lowered = name.lower()
    for code in Language.objects.values_list('code', flat=True):
        # Accept '-', '_' or '.' separators, case-insensitively.  The
        # previous version repeated the case-sensitive '_'/'.' checks
        # verbatim instead of testing their lower-cased variants.
        for sep in ('-', '_', '.'):
            if (name.endswith(sep + code) or
                    lowered.endswith(sep + code.lower())):
                return code
def translation_project_dir_exists(language, project):
    """Tests if there are translation files corresponding to the given
    :param:`language` and :param:`project`.
    """
    if project.get_treestyle() == "gnu":
        # GNU style projects are tricky
        if language.code == 'templates':
            # Language is template look for template files
            for dirpath, dirnames, filenames in os.walk(
                    project.get_real_path()):
                for filename in filenames:
                    if (project.file_belongs_to_project(filename,
                                                        match_templates=True)
                            and match_template_filename(project, filename)):
                        return True
        else:
            # find files with the language name in the project dir
            for dirpath, dirnames, filenames in os.walk(
                    project.get_real_path()):
                for filename in filenames:
                    # FIXME: don't reuse already used file
                    if (project.file_belongs_to_project(filename,
                                                        match_templates=False)
                            and direct_language_match_filename(language.code,
                                                               filename)):
                        return True
    else:
        # find directory with the language name in the project dir
        try:
            # Only the first os.walk() tuple is needed: the top-level
            # directory names.  (Python 2 iterator protocol.)
            dirpath, dirnames, filename = os.walk(
                project.get_real_path()).next()
            if language.code in dirnames:
                return True
        except StopIteration:
            pass
    return False
def init_store_from_template(translation_project, template_store):
    """Initialize a new file for `translation_project` using `template_store`.
    """
    if translation_project.file_style == 'gnu':
        target_pootle_path, target_path = get_translated_name_gnu(
            translation_project, template_store)
    else:
        target_pootle_path, target_path = get_translated_name(
            translation_project, template_store)
    # Create the missing directories for the new TP.
    target_dir = os.path.dirname(target_path)
    if not os.path.exists(target_dir):
        os.makedirs(target_dir)
    # Save a copy of the template retargeted at the TP's language.
    output_file = template_store.file.store
    output_file.settargetlanguage(translation_project.language.code)
    output_file.savefile(target_path)
def get_translated_name_gnu(translation_project, store):
    """Given a template :param:`store` and a :param:`translation_project` return
    target filename.

    :return: (pootle_path, filesystem path) tuple for the translated file
        in a GNU-style project layout.
    """
    # Rebuild the pootle path with the target language code in place of the
    # template's language segment.
    pootle_path_parts = store.pootle_path.split('/')
    pootle_path_parts[1] = translation_project.language.code
    pootle_path = '/'.join(pootle_path_parts[:-1])
    if not pootle_path.endswith('/'):
        pootle_path = pootle_path + '/'
    suffix = "%s%s%s" % (translation_project.language.code, os.extsep,
                         translation_project.project.localfiletype)
    # try loading file first
    try:
        target_store = translation_project.stores.live().get(
            parent__pootle_path=pootle_path,
            name__iexact=suffix,
        )
        return (target_store.pootle_path,
                target_store.file and target_store.file.path)
    except Store.DoesNotExist:
        target_store = None
    # is this GNU-style with prefix?
    use_prefix = (store.parent.child_stores.live().exclude(file="").count() > 1
                  or translation_project.stores.live().exclude(
                      name__iexact=suffix, file='').count())
    if not use_prefix:
        # let's make sure
        for tp in translation_project.project.translationproject_set.exclude(
                language__code='templates').iterator():
            temp_suffix = \
                "%s%s%s" % (tp.language.code, os.extsep,
                            translation_project.project.localfiletype)
            if tp.stores.live().exclude(
                    name__iexact=temp_suffix).exclude(file="").count():
                use_prefix = True
                break
    if use_prefix:
        if store.translation_project.language.code == 'templates':
            tprefix = os.path.splitext(store.name)[0]
            # FIXME: we should detect separator
            prefix = tprefix + '-'
        else:
            # Strip the template's own language code off the stem to get
            # the project prefix.
            prefix = os.path.splitext(store.name)[0][:-len(
                store.translation_project.language.code)]
            tprefix = prefix[:-1]
        try:
            # Try every plausible separator/case combination of
            # '<prefix><sep><lang>.<ext>'.
            target_store = translation_project.stores.live().filter(
                parent__pootle_path=pootle_path,
                name__in=[
                    tprefix + '-' + suffix,
                    tprefix + '_' + suffix,
                    tprefix + '.' + suffix,
                    tprefix + '-' + suffix.lower(),
                    tprefix + '_' + suffix.lower(),
                    tprefix + '.' + suffix.lower(),
                ],
            )[0]
            return (target_store.pootle_path,
                    target_store.file and target_store.file.path)
        except (Store.DoesNotExist, IndexError):
            pass
    else:
        prefix = ""
    # No existing store found: synthesise the target paths from the
    # template's location.
    if store.file:
        path_parts = store.file.path.split(os.sep)
        name = prefix + suffix
        path_parts[-1] = name
        pootle_path_parts[-1] = name
    else:
        path_parts = store.parent.get_real_path().split(os.sep)
        path_parts.append(store.name)
    return '/'.join(pootle_path_parts), os.sep.join(path_parts)
def get_translated_name(translation_project, store):
    """Return (pootle_path, absolute filesystem path) for *store* translated
    into *translation_project*'s language (non-GNU project layout)."""
    name, ext = os.path.splitext(store.name)
    if store.file:
        path_parts = store.file.name.split(os.sep)
    else:
        path_parts = store.parent.get_real_path().split(os.sep)
        path_parts.append(store.name)
    pootle_path_parts = store.pootle_path.split('/')
    # Replace language code
    path_parts[1] = translation_project.language.code
    pootle_path_parts[1] = translation_project.language.code
    # Replace extension
    path_parts[-1] = "%s.%s" % (name,
                                translation_project.project.localfiletype)
    pootle_path_parts[-1] = \
        "%s.%s" % (name, translation_project.project.localfiletype)
    return ('/'.join(pootle_path_parts),
            absolute_real_path(os.sep.join(path_parts)))
def does_not_exist(path):
    """Return True only when *path* definitely does not exist (ENOENT).

    The previous version fell off the end (returning None) both when
    ``os.stat`` succeeded and when it failed with an errno other than
    ENOENT; all paths now return an explicit bool.
    """
    if os.path.exists(path):
        return False
    try:
        # os.path.exists() returns False for broken symlinks and on stat
        # errors, so double-check with a raw stat call.
        os.stat(path)
        return False
    except OSError as e:
        # Only "no such file or directory" counts as truly missing.
        return e.errno == errno.ENOENT
|
import hid
from typing import TYPE_CHECKING, Dict, Tuple, Optional, List, Any, Callable
from electrum_grs import bip32, constants
from electrum_grs.i18n import _
from electrum_grs.keystore import Hardware_KeyStore
from electrum_grs.transaction import PartialTransaction
from electrum_grs.wallet import Standard_Wallet, Multisig_Wallet, Deterministic_Wallet
from electrum_grs.util import bh2u, UserFacingException
from electrum_grs.base_wizard import ScriptTypeNotSupported, BaseWizard
from electrum_grs.logging import get_logger
from electrum_grs.plugin import Device, DeviceInfo, runs_in_hwd_thread
from electrum_grs.simple_config import SimpleConfig
from electrum_grs.json_db import StoredDict
from electrum_grs.storage import get_derivation_used_for_hw_device_encryption
from electrum_grs.bitcoin import OnchainOutputType
import electrum_grs.bitcoin as bitcoin
import electrum_grs.ecc as ecc
from ..hw_wallet import HW_PluginBase, HardwareClientBase
_logger = get_logger(__name__)
try:
from bitbox02 import bitbox02
from bitbox02 import util
from bitbox02.communication import (
devices,
HARDENED,
u2fhid,
bitbox_api_protocol,
FirmwareVersionOutdatedException,
)
requirements_ok = True
except ImportError as e:
if not (isinstance(e, ModuleNotFoundError) and e.name == 'bitbox02'):
_logger.exception('error importing bitbox02 plugin deps')
requirements_ok = False
class BitBox02Client(HardwareClientBase):
# handler is a BitBox02_Handler, importing it would lead to a circular dependency
def __init__(self, handler: Any, device: Device, config: SimpleConfig, *, plugin: HW_PluginBase):
    """Remember the HID entry matching *device* for later pairing and make
    sure a 'bitbox02' section exists in the config.

    :raises Exception: when no connected BitBox02 matches *device*.
    """
    HardwareClientBase.__init__(self, plugin=plugin)
    self.bitbox02_device = None  # type: Optional[bitbox02.BitBox02]
    self.handler = handler
    self.device_descriptor = device
    self.config = config
    self.bitbox_hid_info = None
    if self.config.get("bitbox02") is None:
        # First run: seed the config section used to persist noise pairing
        # keys.
        bitbox02_config: dict = {
            "remote_static_noise_keys": [],
            "noise_privkey": None,
        }
        self.config.set_key("bitbox02", bitbox02_config)
    # Find the HID entry whose path and interface match the descriptor we
    # were given by the device manager.
    bitboxes = devices.get_any_bitbox02s()
    for bitbox in bitboxes:
        if (
            bitbox["path"] == self.device_descriptor.path
            and bitbox["interface_number"]
            == self.device_descriptor.interface_number
        ):
            self.bitbox_hid_info = bitbox
    if self.bitbox_hid_info is None:
        raise Exception("No BitBox02 detected")
def is_initialized(self) -> bool:
    # Always reported as initialized here; fail_if_not_initialized() does
    # the real device_info() check once a connection exists.
    return True
@runs_in_hwd_thread
def close(self):
    """Close the underlying BitBox02 transport, best-effort.

    The device may already be disconnected, or ``self.bitbox02_device``
    may still be None; any resulting error is deliberately ignored.
    """
    try:
        self.bitbox02_device.close()
    except Exception:
        # Was a bare ``except:``; narrowed so KeyboardInterrupt/SystemExit
        # are no longer swallowed.
        pass
def has_usable_connection_with_device(self) -> bool:
    """A connection is usable iff HID info was located at construction."""
    return self.bitbox_hid_info is not None
@runs_in_hwd_thread
def get_soft_device_id(self) -> Optional[str]:
    """Return the device's root fingerprint (hex) as a soft id, pairing
    first when needed; None when no handler is available yet."""
    if self.handler is None:
        # Can't do the pairing without the handler. This happens at wallet creation time, when
        # listing the devices.
        return None
    if self.bitbox02_device is None:
        self.pairing_dialog()
    return self.bitbox02_device.root_fingerprint().hex()
@runs_in_hwd_thread
def pairing_dialog(self):
    """Open the HID transport and run the BitBox02 noise pairing flow.

    Local helpers persist/retrieve noise keys through the 'bitbox02'
    config section; NoiseConfig wires them into the bitbox02 protocol
    callbacks.  On success ``self.bitbox02_device`` is set.
    """
    def pairing_step(code: str, device_response: Callable[[], bool]) -> bool:
        # Show the pairing code to the user while the device waits for
        # confirmation.
        msg = "Please compare and confirm the pairing code on your BitBox02:\n" + code
        self.handler.show_message(msg)
        try:
            res = device_response()
        except:
            # Close the hid device on exception
            hid_device.close()
            raise
        finally:
            self.handler.finished()
        return res

    def exists_remote_static_pubkey(pubkey: bytes) -> bool:
        # True when this device pubkey was already paired and persisted.
        bitbox02_config = self.config.get("bitbox02")
        noise_keys = bitbox02_config.get("remote_static_noise_keys")
        if noise_keys is not None:
            if pubkey.hex() in [noise_key for noise_key in noise_keys]:
                return True
        return False

    def set_remote_static_pubkey(pubkey: bytes) -> None:
        # Persist a newly paired device pubkey (idempotent).
        if not exists_remote_static_pubkey(pubkey):
            bitbox02_config = self.config.get("bitbox02")
            if bitbox02_config.get("remote_static_noise_keys") is not None:
                bitbox02_config["remote_static_noise_keys"].append(pubkey.hex())
            else:
                bitbox02_config["remote_static_noise_keys"] = [pubkey.hex()]
            self.config.set_key("bitbox02", bitbox02_config)

    def get_noise_privkey() -> Optional[bytes]:
        # App-side static noise private key, if one was generated before.
        bitbox02_config = self.config.get("bitbox02")
        privkey = bitbox02_config.get("noise_privkey")
        if privkey is not None:
            return bytes.fromhex(privkey)
        return None

    def set_noise_privkey(privkey: bytes) -> None:
        bitbox02_config = self.config.get("bitbox02")
        bitbox02_config["noise_privkey"] = privkey.hex()
        self.config.set_key("bitbox02", bitbox02_config)

    def attestation_warning() -> None:
        # Warn (but do not abort) when the device fails attestation.
        self.handler.show_error(
            "The BitBox02 attestation failed.\nTry reconnecting the BitBox02.\nWarning: The device might not be genuine, if the\n problem persists please contact Shift support.",
            blocking=True
        )

    class NoiseConfig(bitbox_api_protocol.BitBoxNoiseConfig):
        """NoiseConfig extends BitBoxNoiseConfig"""

        def show_pairing(self, code: str, device_response: Callable[[], bool]) -> bool:
            return pairing_step(code, device_response)

        def attestation_check(self, result: bool) -> None:
            if not result:
                attestation_warning()

        def contains_device_static_pubkey(self, pubkey: bytes) -> bool:
            return exists_remote_static_pubkey(pubkey)

        def add_device_static_pubkey(self, pubkey: bytes) -> None:
            return set_remote_static_pubkey(pubkey)

        def get_app_static_privkey(self) -> Optional[bytes]:
            return get_noise_privkey()

        def set_app_static_privkey(self, privkey: bytes) -> None:
            return set_noise_privkey(privkey)

    if self.bitbox02_device is None:
        hid_device = hid.device()
        hid_device.open_path(self.bitbox_hid_info["path"])
        bitbox02_device = bitbox02.BitBox02(
            transport=u2fhid.U2FHid(hid_device),
            device_info=self.bitbox_hid_info,
            noise_config=NoiseConfig(),
        )
        try:
            bitbox02_device.check_min_version()
        except FirmwareVersionOutdatedException:
            raise
        self.bitbox02_device = bitbox02_device
    self.fail_if_not_initialized()
def fail_if_not_initialized(self) -> None:
    """Raise if the connected BitBox02 has not been set up via the BitBox
    app yet."""
    assert self.bitbox02_device
    if not self.bitbox02_device.device_info()["initialized"]:
        raise Exception(
            "Please initialize the BitBox02 using the BitBox app first before using the BitBox02 in electrum"
        )
def coin_network_from_electrum_network(self) -> int:
    """Map the active electrum network onto the bitbox02 coin constant."""
    return bitbox02.btc.TBTC if constants.net.TESTNET else bitbox02.btc.BTC
@runs_in_hwd_thread
def get_password_for_storage_encryption(self) -> str:
    """Derive the wallet-file encryption password from a device key at the
    fixed hw-encryption derivation path."""
    derivation = get_derivation_used_for_hw_device_encryption()
    derivation_list = bip32.convert_bip32_path_to_list_of_uint32(derivation)
    xpub = self.bitbox02_device.electrum_encryption_key(derivation_list)
    # NOTE: the returned xpub is parsed against mainnet constants
    # regardless of the active network.
    node = bip32.BIP32Node.from_xkey(xpub, net = constants.BitcoinMainnet()).subkey_at_public_derivation(())
    return node.eckey.get_public_key_bytes(compressed=True).hex()
@runs_in_hwd_thread
def get_xpub(self, bip32_path: str, xtype: str, *, display: bool = False) -> str:
    """Fetch the xpub at *bip32_path* from the device.

    The SLIP-132 version prefix (zpub/ypub/Ypub/Zpub and testnet
    variants) is selected from *xtype* and the active network; legacy
    xtypes raise.  When *display* is True the device also shows the xpub
    on its screen.
    """
    if self.bitbox02_device is None:
        self.pairing_dialog()
    if self.bitbox02_device is None:
        raise Exception(
            "Need to setup communication first before attempting any BitBox02 calls"
        )
    self.fail_if_not_initialized()
    xpub_keypath = bip32.convert_bip32_path_to_list_of_uint32(bip32_path)
    coin_network = self.coin_network_from_electrum_network()
    if xtype == "p2wpkh":
        if coin_network == bitbox02.btc.BTC:
            out_type = bitbox02.btc.BTCPubRequest.ZPUB
        else:
            out_type = bitbox02.btc.BTCPubRequest.VPUB
    elif xtype == "p2wpkh-p2sh":
        if coin_network == bitbox02.btc.BTC:
            out_type = bitbox02.btc.BTCPubRequest.YPUB
        else:
            out_type = bitbox02.btc.BTCPubRequest.UPUB
    elif xtype == "p2wsh-p2sh":
        if coin_network == bitbox02.btc.BTC:
            out_type = bitbox02.btc.BTCPubRequest.CAPITAL_YPUB
        else:
            out_type = bitbox02.btc.BTCPubRequest.CAPITAL_UPUB
    elif xtype == "p2wsh":
        if coin_network == bitbox02.btc.BTC:
            out_type = bitbox02.btc.BTCPubRequest.CAPITAL_ZPUB
        else:
            out_type = bitbox02.btc.BTCPubRequest.CAPITAL_VPUB
    # The other legacy types are not supported
    else:
        raise Exception("invalid xtype:{}".format(xtype))
    return self.bitbox02_device.btc_xpub(
        keypath=xpub_keypath,
        xpub_type=out_type,
        coin=coin_network,
        display=display,
    )
@runs_in_hwd_thread
def label(self) -> str:
    """Device label for the UI: '<device name> (<root fingerprint>)'."""
    if self.handler is None:
        # Can't do the pairing without the handler. This happens at wallet creation time, when
        # listing the devices.
        return super().label()
    if self.bitbox02_device is None:
        self.pairing_dialog()
    # We add the fingerprint to the label, as if there are two devices with the same label, the
    # device manager can mistake one for another and fail.
    return "%s (%s)" % (
        self.bitbox02_device.device_info()["name"],
        self.bitbox02_device.root_fingerprint().hex(),
    )
@runs_in_hwd_thread
def request_root_fingerprint_from_device(self) -> str:
    """Return the BIP32 root fingerprint reported by the device, as hex.

    Requires that pairing has already happened (``pairing_dialog``).
    """
    if self.bitbox02_device is None:
        raise Exception(
            "Need to setup communication first before attempting any BitBox02 calls"
        )
    return self.bitbox02_device.root_fingerprint().hex()
def is_pairable(self) -> bool:
    """The device can be paired iff its HID info was found at init."""
    return self.bitbox_hid_info is not None
@runs_in_hwd_thread
def btc_multisig_config(
    self, coin, bip32_path: List[int], wallet: Multisig_Wallet, xtype: str,
):
    """
    Set and get a multisig config with the current device and some other arbitrary xpubs.
    Registers it on the device if not already registered.
    xtype: 'p2wsh' | 'p2wsh-p2sh'
    """
    assert xtype in ("p2wsh", "p2wsh-p2sh")
    if self.bitbox02_device is None:
        raise Exception(
            "Need to setup communication first before attempting any BitBox02 calls"
        )
    # The multisig account is identified by the account-level keypath
    # (the given path minus change/address index).
    account_keypath = bip32_path[:-2]
    xpubs = wallet.get_master_public_keys()
    our_xpub = self.get_xpub(
        bip32.convert_bip32_intpath_to_strpath(account_keypath), xtype
    )
    multisig_config = bitbox02.btc.BTCScriptConfig(
        multisig=bitbox02.btc.BTCScriptConfig.Multisig(
            threshold=wallet.m,
            xpubs=[util.parse_xpub(xpub) for xpub in xpubs],
            our_xpub_index=xpubs.index(our_xpub),
            script_type={
                "p2wsh": bitbox02.btc.BTCScriptConfig.Multisig.P2WSH,
                "p2wsh-p2sh": bitbox02.btc.BTCScriptConfig.Multisig.P2WSH_P2SH,
            }[xtype]
        )
    )
    is_registered = self.bitbox02_device.btc_is_script_config_registered(
        coin, multisig_config, account_keypath
    )
    if not is_registered:
        # First use of this account on the device: the user names it and
        # confirms the registration on the device itself.
        name = self.handler.name_multisig_account()
        try:
            self.bitbox02_device.btc_register_script_config(
                coin=coin,
                script_config=multisig_config,
                keypath=account_keypath,
                name=name,
            )
        except bitbox02.DuplicateEntryException:
            raise
        except:
            raise UserFacingException("Failed to register multisig\naccount configuration on BitBox02")
    return multisig_config
@runs_in_hwd_thread
def show_address(
self, bip32_path: str, address_type: str, wallet: Deterministic_Wallet
) -> str:
if self.bitbox02_device is None:
raise Exception(
"Need to setup communication first before attempting any BitBox02 calls"
)
address_keypath = bip32.convert_bip32_path_to_list_of_uint32(bip32_path)
coin_network = self.coin_network_from_electrum_network()
if address_type == "p2wpkh":
script_config = bitbox02.btc.BTCScriptConfig(
simple_type=bitbox02.btc.BTCScriptConfig.P2WPKH
)
elif address_type == "p2wpkh-p2sh":
script_config = bitbox02.btc.BTCScriptConfig(
simple_type=bitbox02.btc.BTCScriptConfig.P2WPKH_P2SH
)
elif address_type in ("p2wsh-p2sh", "p2wsh"):
if type(wallet) is Multisig_Wallet:
script_config = self.btc_multisig_config(
coin_network, address_keypath, wallet, address_type,
)
else:
raise Exception("Can only use p2wsh-p2sh or p2wsh with multisig wallets")
else:
raise Exception(
"invalid address xtype: {} is not supported by the BitBox02".format(
address_type
)
)
return self.bitbox02_device.btc_address(
keypath=address_keypath,
coin=coin_network,
script_config=script_config,
display=True,
)
def _get_coin(self):
return bitbox02.btc.TBTC if constants.net.TESTNET else bitbox02.btc.BTC
@runs_in_hwd_thread
def sign_transaction(
self,
keystore: Hardware_KeyStore,
tx: PartialTransaction,
wallet: Deterministic_Wallet,
):
if tx.is_complete():
return
if self.bitbox02_device is None:
raise Exception(
"Need to setup communication first before attempting any BitBox02 calls"
)
coin = self._get_coin()
tx_script_type = None
# Build BTCInputType list
inputs = []
for txin in tx.inputs():
my_pubkey, full_path = keystore.find_my_pubkey_in_txinout(txin)
if full_path is None:
raise Exception(
"A wallet owned pubkey was not found in the transaction input to be signed"
)
prev_tx = txin.utxo
if prev_tx is None:
raise UserFacingException(_('Missing previous tx.'))
prev_inputs: List[bitbox02.BTCPrevTxInputType] = []
prev_outputs: List[bitbox02.BTCPrevTxOutputType] = []
for prev_txin in prev_tx.inputs():
prev_inputs.append(
{
"prev_out_hash": prev_txin.prevout.txid[::-1],
"prev_out_index": prev_txin.prevout.out_idx,
"signature_script": prev_txin.script_sig,
"sequence": prev_txin.nsequence,
}
)
for prev_txout in prev_tx.outputs():
prev_outputs.append(
{
"value": prev_txout.value,
"pubkey_script": prev_txout.scriptpubkey,
}
)
inputs.append(
{
"prev_out_hash": txin.prevout.txid[::-1],
"prev_out_index": txin.prevout.out_idx,
"prev_out_value": txin.value_sats(),
"sequence": txin.nsequence,
"keypath": full_path,
"script_config_index": 0,
"prev_tx": {
"version": prev_tx.version,
"locktime": prev_tx.locktime,
"inputs": prev_inputs,
"outputs": prev_outputs,
},
}
)
if tx_script_type == None:
tx_script_type = txin.script_type
elif tx_script_type != txin.script_type:
raise Exception("Cannot mix different input script types")
if tx_script_type == "p2wpkh":
tx_script_type = bitbox02.btc.BTCScriptConfig(
simple_type=bitbox02.btc.BTCScriptConfig.P2WPKH
)
elif tx_script_type == "p2wpkh-p2sh":
tx_script_type = bitbox02.btc.BTCScriptConfig(
simple_type=bitbox02.btc.BTCScriptConfig.P2WPKH_P2SH
)
elif tx_script_type in ("p2wsh-p2sh", "p2wsh"):
if type(wallet) is Multisig_Wallet:
tx_script_type = self.btc_multisig_config(coin, full_path, wallet, tx_script_type)
else:
raise Exception("Can only use p2wsh-p2sh or p2wsh with multisig wallets")
else:
raise UserFacingException(
"invalid input script type: {} is not supported by the BitBox02".format(
tx_script_type
)
)
# Build BTCOutputType list
outputs = []
for txout in tx.outputs():
assert txout.address
# check for change
if txout.is_change:
my_pubkey, change_pubkey_path = keystore.find_my_pubkey_in_txinout(txout)
outputs.append(
bitbox02.BTCOutputInternal(
keypath=change_pubkey_path, value=txout.value, script_config_index=0,
)
)
else:
addrtype, pubkey_hash = bitcoin.address_to_hash(txout.address)
if addrtype == OnchainOutputType.P2PKH:
output_type = bitbox02.btc.P2PKH
elif addrtype == OnchainOutputType.P2SH:
output_type = bitbox02.btc.P2SH
elif addrtype == OnchainOutputType.WITVER0_P2WPKH:
output_type = bitbox02.btc.P2WPKH
elif addrtype == OnchainOutputType.WITVER0_P2WSH:
output_type = bitbox02.btc.P2WSH
else:
raise UserFacingException(
"Received unsupported output type during transaction signing: {} is not supported by the BitBox02".format(
addrtype
)
)
outputs.append(
bitbox02.BTCOutputExternal(
output_type=output_type,
output_hash=pubkey_hash,
value=txout.value,
)
)
keypath_account = full_path[:-2]
sigs = self.bitbox02_device.btc_sign(
coin,
[bitbox02.btc.BTCScriptConfigWithKeypath(
script_config=tx_script_type,
keypath=keypath_account,
)],
inputs=inputs,
outputs=outputs,
locktime=tx.locktime,
version=tx.version,
)
# Fill signatures
if len(sigs) != len(tx.inputs()):
raise Exception("Incorrect number of inputs signed.") # Should never occur
signatures = [bh2u(ecc.der_sig_from_sig_string(x[1])) + "01" for x in sigs]
tx.update_signatures(signatures)
def sign_message(self, keypath: str, message: bytes, xtype: str) -> bytes:
if self.bitbox02_device is None:
raise Exception(
"Need to setup communication first before attempting any BitBox02 calls"
)
try:
simple_type = {
"p2wpkh-p2sh":bitbox02.btc.BTCScriptConfig.P2WPKH_P2SH,
"p2wpkh": bitbox02.btc.BTCScriptConfig.P2WPKH,
}[xtype]
except KeyError:
raise UserFacingException("The BitBox02 does not support signing messages for this address type: {}".format(xtype))
_, _, signature = self.bitbox02_device.btc_sign_msg(
self._get_coin(),
bitbox02.btc.BTCScriptConfigWithKeypath(
script_config=bitbox02.btc.BTCScriptConfig(
simple_type=simple_type,
),
keypath=bip32.convert_bip32_path_to_list_of_uint32(keypath),
),
message,
)
return signature
class BitBox02_KeyStore(Hardware_KeyStore):
    """Electrum keystore backed by a BitBox02 hardware wallet."""
    hw_type = "bitbox02"
    device = "BitBox02"
    # Injected by the plugin framework; used by get_client() below.
    plugin: "BitBox02Plugin"
    def __init__(self, d: dict):
        super().__init__(d)
        self.force_watching_only = False
        # True while a blocking device dialog is up; checked in give_error().
        self.ux_busy = False
    def get_client(self):
        """Return the plugin-managed BitBox02Client for this keystore."""
        return self.plugin.get_client(self)
    def give_error(self, message: Exception, clear_client: bool = False):
        """Log *message*, show it to the user unless a dialog is busy, and re-raise.

        When *clear_client* is set the cached client is dropped so the next
        operation reconnects.  Always raises UserFacingException.
        """
        self.logger.info(message)
        if not self.ux_busy:
            self.handler.show_error(message)
        else:
            self.ux_busy = False
        if clear_client:
            self.client = None
        raise UserFacingException(message)
    def decrypt_message(self, pubkey, message, password):
        """Not supported by the BitBox02; always raises UserFacingException."""
        raise UserFacingException(
            _(
                "Message encryption, decryption and signing are currently not supported for {}"
            ).format(self.device)
        )
    def sign_message(self, sequence, message, password):
        """Sign *message* with the key at (change, index) *sequence*.

        The BitBox02 handles its own unlocking, so *password* must be falsy.
        """
        if password:
            raise Exception("BitBox02 does not accept a password from the host")
        client = self.get_client()
        keypath = self.get_derivation_prefix() + "/%d/%d" % sequence
        xtype = self.get_bip32_node_for_xpub().xtype
        return client.sign_message(keypath, message.encode("utf-8"), xtype)
    @runs_in_hwd_thread
    def sign_transaction(self, tx: PartialTransaction, password: str):
        """Have the device sign *tx* in place; no-op if already complete."""
        if tx.is_complete():
            return
        client = self.get_client()
        assert isinstance(client, BitBox02Client)
        try:
            try:
                self.handler.show_message("Authorize Transaction...")
                client.sign_transaction(self, tx, self.handler.get_wallet())
            finally:
                # Dismiss the "Authorize" message regardless of outcome.
                self.handler.finished()
        except Exception as e:
            self.logger.exception("")
            self.give_error(e, True)
            return
    @runs_in_hwd_thread
    def show_address(
        self, sequence: Tuple[int, int], txin_type: str, wallet: Deterministic_Wallet
    ):
        """Display the address at *sequence* on the device screen."""
        client = self.get_client()
        address_path = "{}/{}/{}".format(
            self.get_derivation_prefix(), sequence[0], sequence[1]
        )
        try:
            try:
                self.handler.show_message(_("Showing address ..."))
                dev_addr = client.show_address(address_path, txin_type, wallet)
            finally:
                self.handler.finished()
        except Exception as e:
            self.logger.exception("")
            self.handler.show_error(e)
class BitBox02Plugin(HW_PluginBase):
    """Electrum hardware-wallet plugin entry point for the BitBox02."""
    keystore_class = BitBox02_KeyStore
    # Minimum required version of the python bitbox02 library.
    minimum_library = (5, 2, 0)
    # (vendor_id, product_id) USB HID pairs matched during device scan.
    DEVICE_IDS = [(0x03EB, 0x2403)]
    SUPPORTED_XTYPES = ("p2wpkh-p2sh", "p2wpkh", "p2wsh", "p2wsh-p2sh")
    def __init__(self, parent: HW_PluginBase, config: SimpleConfig, name: str):
        super().__init__(parent, config, name)
        self.libraries_available = self.check_libraries_available()
        if not self.libraries_available:
            return
        self.device_manager().register_devices(self.DEVICE_IDS, plugin=self)
    def get_library_version(self):
        """Return the installed bitbox02 library version string.

        Raises ImportError when the requirement check failed.
        """
        try:
            from bitbox02 import bitbox02
            version = bitbox02.__version__
        except:
            version = "unknown"
        # NOTE(review): `requirements_ok` is defined outside this excerpt,
        # presumably a module-level flag set at import time -- verify.
        if requirements_ok:
            return version
        else:
            raise ImportError()
    # handler is a BitBox02_Handler
    @runs_in_hwd_thread
    def create_client(self, device: Device, handler: Any) -> BitBox02Client:
        # NOTE(review): assigning self.handler only when *handler* is falsy
        # looks inverted; confirm against upstream intent.
        if not handler:
            self.handler = handler
        return BitBox02Client(handler, device, self.config, plugin=self)
    def setup_device(
        self, device_info: DeviceInfo, wizard: BaseWizard, purpose: int
    ):
        """Pair with the device if needed and ensure it is initialized."""
        device_id = device_info.device.id_
        client = self.scan_and_create_client_for_device(device_id=device_id, wizard=wizard)
        assert isinstance(client, BitBox02Client)
        if client.bitbox02_device is None:
            wizard.run_task_without_blocking_gui(
                task=lambda client=client: client.pairing_dialog())
        client.fail_if_not_initialized()
        return client
    def get_xpub(
        self, device_id: str, derivation: str, xtype: str, wizard: BaseWizard
    ):
        """Fetch the xpub at *derivation* for a supported script type."""
        if xtype not in self.SUPPORTED_XTYPES:
            raise ScriptTypeNotSupported(
                _("This type of script is not supported with {}: {}").format(self.device, xtype)
            )
        client = self.scan_and_create_client_for_device(device_id=device_id, wizard=wizard)
        assert isinstance(client, BitBox02Client)
        assert client.bitbox02_device is not None
        return client.get_xpub(derivation, xtype)
    @runs_in_hwd_thread
    def show_address(
        self,
        wallet: Deterministic_Wallet,
        address: str,
        keystore: BitBox02_KeyStore = None,
    ):
        """Show *address* on the device after basic ownership checks."""
        if keystore is None:
            keystore = wallet.get_keystore()
        if not self.show_address_helper(wallet, address, keystore):
            return
        txin_type = wallet.get_txin_type(address)
        sequence = wallet.get_address_index(address)
        keystore.show_address(sequence, txin_type, wallet)
    @runs_in_hwd_thread
    def show_xpub(self, keystore: BitBox02_KeyStore):
        """Display the keystore's account xpub on the device screen."""
        client = keystore.get_client()
        assert isinstance(client, BitBox02Client)
        derivation = keystore.get_derivation_prefix()
        xtype = keystore.get_bip32_node_for_xpub().xtype
        client.get_xpub(derivation, xtype, display=True)
    def create_device_from_hid_enumeration(self, d: dict, *, product_key) -> 'Device':
        """Build a Device record, keyed by HID path rather than product id."""
        device = super().create_device_from_hid_enumeration(d, product_key=product_key)
        # The BitBox02's product_id is not unique per device, thus use the path instead to
        # distinguish devices.
        id_ = str(d['path'])
        return device._replace(id_=id_)
|
"""
Parent classes for all parser classes
"""
__author__ = "Felix Simkovic"
__date__ = "04 Oct 2016"
__version__ = "0.1"
import abc
# Python 2/3 compatible abstract base: instantiating ABCMeta directly avoids
# the `metaclass=` keyword syntax that differs between the two interpreters.
ABC = abc.ABCMeta("ABC", (object,), {})
from conkit.core.contact import Contact
from conkit.core.contactmap import ContactMap
from conkit.core.contactfile import ContactFile
from conkit.core.sequence import Sequence
from conkit.core.sequencefile import SequenceFile
class Parser(ABC):
    """Abstract class for all parsers

    Concrete subclasses must implement :meth:`read` and :meth:`write`.
    """
    @abc.abstractmethod
    def read(self):
        """Parse input into a conkit hierarchy object."""
        pass
    @abc.abstractmethod
    def write(self):
        """Serialize a conkit hierarchy object."""
        pass
    @classmethod
    def _reconstruct(cls, hierarchy):
        """Wrapper to re-construct full hierarchy when parts are provided

        Wraps a bare Contact/ContactMap in a ContactFile, and a bare
        Sequence in a SequenceFile; full files pass through unchanged.

        Raises
        ------
        TypeError
           If *hierarchy* is not a recognized conkit entity.
        """
        if isinstance(hierarchy, ContactFile):
            h = hierarchy
        elif isinstance(hierarchy, ContactMap):
            h = ContactFile("conkit")
            h.add(hierarchy)
        elif isinstance(hierarchy, Contact):
            h = ContactFile("conkit")
            m = ContactMap("1")
            m.add(hierarchy)
            h.add(m)
        elif isinstance(hierarchy, SequenceFile):
            h = hierarchy
        elif isinstance(hierarchy, Sequence):
            h = SequenceFile("conkit")
            h.add(hierarchy)
        else:
            # Previously fell through to an UnboundLocalError on `h`;
            # fail with an explicit, descriptive error instead.
            raise TypeError("Unrecognized hierarchy type: %s" % type(hierarchy))
        return h
class ContactFileParser(Parser):
    """Base class shared by every contact-file parser."""
class SequenceFileParser(Parser):
    """Base class shared by every sequence-file parser."""
|
import os
import sys
import random
import time
import json
import wikiquote
import tuitear
from threading import Thread
# NOTE(review): "CONGIG_JSON" looks like a typo for "CONFIG_JSON"; renaming
# would touch main() below, so it is only flagged here.
CONGIG_JSON = 'bots.json'
# Seconds slept per scheduler tick in start_bot().
INTERVALO = 1
# Global flag polled by the bot threads to request shutdown.
stop = False
def start_bot(bot):
    """Thread body: run one bot, given as a config dict.

    Loads all quotes for the bot's wikiquote pages up front, then tweets a
    random quote every bot['intervalo'] seconds until the module-level
    `stop` flag is set.  (Python 2 code: print statements, str/bytes mix.)
    """
    citas = []
    for pagina in bot['paginas']:
        print 'Cargando', pagina
        quotes = wikiquote.get_quotes(pagina.encode('utf8'))
        # Keep the source page alongside each quote for the tweet template.
        quotes = [(q, pagina) for q in quotes]
        citas += quotes
    tiempo = 0
    while not stop:
        if tiempo >= bot['intervalo']:
            quote, pagina = random.choice(citas)
            tweet = bot['format'].encode('utf8') % dict(pagina = \
                pagina.encode('utf8'), frase = quote.encode('utf8'))
            if len(tweet) > 138:
                #print 'tweet largo'
                # Too long for a tweet: skip the sleep and immediately
                # retry with another random quote.
                continue
            print "%s: %s" % (bot['name'], tweet.decode('utf8'))
            tuitear.tuitear(tweet, bot['consumer_key'], bot['consumer_secret'],
                bot['access_token'], bot['access_token_secret'])
            tiempo = 0
        tiempo += INTERVALO
        time.sleep(INTERVALO)
    print 'Thread para', bot['name'], 'detenido'
def main():
    """Load bot configs from JSON and launch one daemon thread per bot.

    The config file is either argv[1] or CONGIG_JSON next to this script.
    """
    path = os.path.dirname(__file__)
    if len(sys.argv) == 2:
        filename = sys.argv[1]
    else:
        filename = os.path.join(path, CONGIG_JSON)
    print 'Cargando bots en', filename
    j = json.load(file(filename))
    for bot in j['bots']:
        if bot.get('disabled'):
            continue
        thread = Thread(target = start_bot, args=[bot])
        # Daemon threads are killed when the main thread exits.
        thread.daemon = True
        thread.start()
        print 'Thread para', bot['name'], 'iniciado'
    while True:
        # Keep the main thread alive so the daemon workers keep running.
        # NOTE(review): this busy-loop pins a CPU core; time.sleep() in the
        # body would be kinder.
        pass
if __name__ == '__main__':
    main()
|
'''This is a testcase for the SmartOS datasource.
It replicates a serial console and acts like the SmartOS console does in
order to validate return responses.
'''
from __future__ import print_function
from binascii import crc32
import json
import multiprocessing
import os
import os.path
import re
import signal
import stat
import unittest2
import uuid
from cloudinit import serial
from cloudinit.sources import DataSourceSmartOS
from cloudinit.sources.DataSourceSmartOS import (
convert_smartos_network_data as convert_net,
SMARTOS_ENV_KVM, SERIAL_DEVICE, get_smartos_environ,
identify_file)
from cloudinit.event import EventType
import six
from cloudinit import helpers as c_helpers
from cloudinit.util import (
b64e, subp, ProcessExecutionError, which, write_file)
from cloudinit.tests.helpers import (
CiTestCase, mock, FilesystemMockingTestCase, skipIf)
# Optional dependency probe: some tests are skipped when pyserial is absent.
try:
    import serial as _pyserial
    assert _pyserial  # avoid pyflakes error F401: import unused
    HAS_PYSERIAL = True
except ImportError:
    HAS_PYSERIAL = False
# Module path of the datasource under test, used as a mock.patch prefix.
DSMOS = 'cloudinit.sources.DataSourceSmartOS'
SDC_NICS = json.loads("""
[
{
"nic_tag": "external",
"primary": true,
"mtu": 1500,
"model": "virtio",
"gateway": "8.12.42.1",
"netmask": "255.255.255.0",
"ip": "8.12.42.102",
"network_uuid": "992fc7ce-6aac-4b74-aed6-7b9d2c6c0bfe",
"gateways": [
"8.12.42.1"
],
"vlan_id": 324,
"mac": "90:b8:d0:f5:e4:f5",
"interface": "net0",
"ips": [
"8.12.42.102/24"
]
},
{
"nic_tag": "sdc_overlay/16187209",
"gateway": "192.168.128.1",
"model": "virtio",
"mac": "90:b8:d0:a5:ff:cd",
"netmask": "255.255.252.0",
"ip": "192.168.128.93",
"network_uuid": "4cad71da-09bc-452b-986d-03562a03a0a9",
"gateways": [
"192.168.128.1"
],
"vlan_id": 2,
"mtu": 8500,
"interface": "net1",
"ips": [
"192.168.128.93/22"
]
}
]
""")
SDC_NICS_ALT = json.loads("""
[
{
"interface": "net0",
"mac": "90:b8:d0:ae:64:51",
"vlan_id": 324,
"nic_tag": "external",
"gateway": "8.12.42.1",
"gateways": [
"8.12.42.1"
],
"netmask": "255.255.255.0",
"ip": "8.12.42.51",
"ips": [
"8.12.42.51/24"
],
"network_uuid": "992fc7ce-6aac-4b74-aed6-7b9d2c6c0bfe",
"model": "virtio",
"mtu": 1500,
"primary": true
},
{
"interface": "net1",
"mac": "90:b8:d0:bd:4f:9c",
"vlan_id": 600,
"nic_tag": "internal",
"netmask": "255.255.255.0",
"ip": "10.210.1.217",
"ips": [
"10.210.1.217/24"
],
"network_uuid": "98657fdf-11f4-4ee2-88a4-ce7fe73e33a6",
"model": "virtio",
"mtu": 1500
}
]
""")
SDC_NICS_DHCP = json.loads("""
[
{
"interface": "net0",
"mac": "90:b8:d0:ae:64:51",
"vlan_id": 324,
"nic_tag": "external",
"gateway": "8.12.42.1",
"gateways": [
"8.12.42.1"
],
"netmask": "255.255.255.0",
"ip": "8.12.42.51",
"ips": [
"8.12.42.51/24"
],
"network_uuid": "992fc7ce-6aac-4b74-aed6-7b9d2c6c0bfe",
"model": "virtio",
"mtu": 1500,
"primary": true
},
{
"interface": "net1",
"mac": "90:b8:d0:bd:4f:9c",
"vlan_id": 600,
"nic_tag": "internal",
"netmask": "255.255.255.0",
"ip": "10.210.1.217",
"ips": [
"dhcp"
],
"network_uuid": "98657fdf-11f4-4ee2-88a4-ce7fe73e33a6",
"model": "virtio",
"mtu": 1500
}
]
""")
SDC_NICS_MIP = json.loads("""
[
{
"interface": "net0",
"mac": "90:b8:d0:ae:64:51",
"vlan_id": 324,
"nic_tag": "external",
"gateway": "8.12.42.1",
"gateways": [
"8.12.42.1"
],
"netmask": "255.255.255.0",
"ip": "8.12.42.51",
"ips": [
"8.12.42.51/24",
"8.12.42.52/24"
],
"network_uuid": "992fc7ce-6aac-4b74-aed6-7b9d2c6c0bfe",
"model": "virtio",
"mtu": 1500,
"primary": true
},
{
"interface": "net1",
"mac": "90:b8:d0:bd:4f:9c",
"vlan_id": 600,
"nic_tag": "internal",
"netmask": "255.255.255.0",
"ip": "10.210.1.217",
"ips": [
"10.210.1.217/24",
"10.210.1.151/24"
],
"network_uuid": "98657fdf-11f4-4ee2-88a4-ce7fe73e33a6",
"model": "virtio",
"mtu": 1500
}
]
""")
SDC_NICS_MIP_IPV6 = json.loads("""
[
{
"interface": "net0",
"mac": "90:b8:d0:ae:64:51",
"vlan_id": 324,
"nic_tag": "external",
"gateway": "8.12.42.1",
"gateways": [
"8.12.42.1"
],
"netmask": "255.255.255.0",
"ip": "8.12.42.51",
"ips": [
"2001:4800:78ff:1b:be76:4eff:fe06:96b3/64",
"8.12.42.51/24"
],
"network_uuid": "992fc7ce-6aac-4b74-aed6-7b9d2c6c0bfe",
"model": "virtio",
"mtu": 1500,
"primary": true
},
{
"interface": "net1",
"mac": "90:b8:d0:bd:4f:9c",
"vlan_id": 600,
"nic_tag": "internal",
"netmask": "255.255.255.0",
"ip": "10.210.1.217",
"ips": [
"10.210.1.217/24"
],
"network_uuid": "98657fdf-11f4-4ee2-88a4-ce7fe73e33a6",
"model": "virtio",
"mtu": 1500
}
]
""")
SDC_NICS_IPV4_IPV6 = json.loads("""
[
{
"interface": "net0",
"mac": "90:b8:d0:ae:64:51",
"vlan_id": 324,
"nic_tag": "external",
"gateway": "8.12.42.1",
"gateways": ["8.12.42.1", "2001::1", "2001::2"],
"netmask": "255.255.255.0",
"ip": "8.12.42.51",
"ips": ["2001::10/64", "8.12.42.51/24", "2001::11/64",
"8.12.42.52/32"],
"network_uuid": "992fc7ce-6aac-4b74-aed6-7b9d2c6c0bfe",
"model": "virtio",
"mtu": 1500,
"primary": true
},
{
"interface": "net1",
"mac": "90:b8:d0:bd:4f:9c",
"vlan_id": 600,
"nic_tag": "internal",
"netmask": "255.255.255.0",
"ip": "10.210.1.217",
"ips": ["10.210.1.217/24"],
"gateways": ["10.210.1.210"],
"network_uuid": "98657fdf-11f4-4ee2-88a4-ce7fe73e33a6",
"model": "virtio",
"mtu": 1500
}
]
""")
SDC_NICS_SINGLE_GATEWAY = json.loads("""
[
{
"interface":"net0",
"mac":"90:b8:d0:d8:82:b4",
"vlan_id":324,
"nic_tag":"external",
"gateway":"8.12.42.1",
"gateways":["8.12.42.1"],
"netmask":"255.255.255.0",
"ip":"8.12.42.26",
"ips":["8.12.42.26/24"],
"network_uuid":"992fc7ce-6aac-4b74-aed6-7b9d2c6c0bfe",
"model":"virtio",
"mtu":1500,
"primary":true
},
{
"interface":"net1",
"mac":"90:b8:d0:0a:51:31",
"vlan_id":600,
"nic_tag":"internal",
"netmask":"255.255.255.0",
"ip":"10.210.1.27",
"ips":["10.210.1.27/24"],
"network_uuid":"98657fdf-11f4-4ee2-88a4-ce7fe73e33a6",
"model":"virtio",
"mtu":1500
}
]
""")
# Canned metadata key/value responses served by PsuedoJoyentClient below.
MOCK_RETURNS = {
    'hostname': 'test-host',
    'root_authorized_keys': 'ssh-rsa AAAAB3Nz...aC1yc2E= keyname',
    'disable_iptables_flag': None,
    'enable_motd_sys_info': None,
    'test-var1': 'some data',
    'cloud-init:user-data': '\n'.join(['#!/bin/sh', '/bin/true', '']),
    'sdc:datacenter_name': 'somewhere2',
    'sdc:operator-script': '\n'.join(['bin/true', '']),
    'sdc:uuid': str(uuid.uuid4()),
    'sdc:vendor-data': '\n'.join(['VENDOR_DATA', '']),
    'user-data': '\n'.join(['something', '']),
    'user-script': '\n'.join(['/bin/true', '']),
    'sdc:nics': json.dumps(SDC_NICS),
}
# DMI product name that identifies a SmartOS KVM guest.
DMI_DATA_RETURN = 'smartdc'
# Lengths of the fixed-size response headers on the metadata channel.
SUCCESS_LEN = len('0123abcd SUCCESS ')
NOTFOUND_LEN = len('0123abcd NOTFOUND')
class PsuedoJoyentClient(object):
    """In-memory stand-in for the Joyent metadata client.

    Serves key/value pairs from a plain dict instead of the SmartOS serial
    metadata channel, and tracks open/close pairing of the transport so
    tests can assert correct usage.
    """
    def __init__(self, data=None):
        # Copy MOCK_RETURNS so tests mutating the data cannot leak state.
        # (Idiom cleanup: removed the redundant bare `return`.)
        if data is None:
            data = MOCK_RETURNS.copy()
        self.data = data
        self._is_open = False

    def get(self, key, default=None, strip=False):
        """Return the value for *key*, or *default* if absent.

        *strip* is applied only to stored values, never to *default* --
        matching the original behavior.
        """
        if key not in self.data:
            return default
        value = self.data[key]
        return value.strip() if strip else value

    def get_json(self, key, default=None):
        """Return the JSON-decoded value for *key*, or *default* if unset."""
        result = self.get(key, default=default)
        if result is None:
            return default
        return json.loads(result)

    def exists(self):
        return True

    def open_transport(self):
        # Opening twice without a close is a test error.
        assert not self._is_open
        self._is_open = True

    def close_transport(self):
        assert self._is_open
        self._is_open = False
class TestSmartOSDataSource(FilesystemMockingTestCase):
    """End-to-end tests of DataSourceSmartOS against PsuedoJoyentClient data."""
    # Placeholders populated by add_patch() in setUp().
    jmc_cfact = None
    get_smartos_environ = None
    def setUp(self):
        super(TestSmartOSDataSource, self).setUp()
        self.add_patch(DSMOS + ".get_smartos_environ", "get_smartos_environ")
        self.add_patch(DSMOS + ".jmc_client_factory", "jmc_cfact")
        # Redirect the legacy /var/db user-data directory into a tmp dir.
        self.legacy_user_d = self.tmp_path('legacy_user_tmp')
        os.mkdir(self.legacy_user_d)
        self.add_patch(DSMOS + ".LEGACY_USER_D", "m_legacy_user_d",
                       autospec=False, new=self.legacy_user_d)
        self.add_patch(DSMOS + ".identify_file", "m_identify_file",
                       return_value="text/plain")
    def _get_ds(self, mockdata=None, mode=DataSourceSmartOS.SMARTOS_ENV_KVM,
                sys_cfg=None, ds_cfg=None):
        """Build a DataSourceSmartOS wired to a PsuedoJoyentClient fixture."""
        self.jmc_cfact.return_value = PsuedoJoyentClient(mockdata)
        self.get_smartos_environ.return_value = mode
        tmpd = self.tmp_dir()
        dirs = {'cloud_dir': self.tmp_path('cloud_dir', tmpd),
                'run_dir': self.tmp_path('run_dir')}
        for d in dirs.values():
            os.mkdir(d)
        paths = c_helpers.Paths(dirs)
        if sys_cfg is None:
            sys_cfg = {}
        if ds_cfg is not None:
            sys_cfg['datasource'] = sys_cfg.get('datasource', {})
            sys_cfg['datasource']['SmartOS'] = ds_cfg
        return DataSourceSmartOS.DataSourceSmartOS(
            sys_cfg, distro=None, paths=paths)
    def test_no_base64(self):
        ds_cfg = {'no_base64_decode': ['test_var1'], 'all_base': True}
        dsrc = self._get_ds(ds_cfg=ds_cfg)
        ret = dsrc.get_data()
        self.assertTrue(ret)
    def test_uuid(self):
        dsrc = self._get_ds(mockdata=MOCK_RETURNS)
        ret = dsrc.get_data()
        self.assertTrue(ret)
        self.assertEqual(MOCK_RETURNS['sdc:uuid'],
                         dsrc.metadata['instance-id'])
    def test_platform_info(self):
        """All platform-related attributes are properly set."""
        dsrc = self._get_ds(mockdata=MOCK_RETURNS)
        self.assertEqual('joyent', dsrc.cloud_name)
        self.assertEqual('joyent', dsrc.platform_type)
        self.assertEqual('serial (/dev/ttyS1)', dsrc.subplatform)
    def test_root_keys(self):
        dsrc = self._get_ds(mockdata=MOCK_RETURNS)
        ret = dsrc.get_data()
        self.assertTrue(ret)
        self.assertEqual(MOCK_RETURNS['root_authorized_keys'],
                         dsrc.metadata['public-keys'])
    def test_hostname_b64(self):
        dsrc = self._get_ds(mockdata=MOCK_RETURNS)
        ret = dsrc.get_data()
        self.assertTrue(ret)
        self.assertEqual(MOCK_RETURNS['hostname'],
                         dsrc.metadata['local-hostname'])
    def test_hostname(self):
        dsrc = self._get_ds(mockdata=MOCK_RETURNS)
        ret = dsrc.get_data()
        self.assertTrue(ret)
        self.assertEqual(MOCK_RETURNS['hostname'],
                         dsrc.metadata['local-hostname'])
    def test_hostname_if_no_sdc_hostname(self):
        # 'hostname' wins over 'sdc:hostname' when both are present.
        my_returns = MOCK_RETURNS.copy()
        my_returns['sdc:hostname'] = 'sdc-' + my_returns['hostname']
        dsrc = self._get_ds(mockdata=my_returns)
        ret = dsrc.get_data()
        self.assertTrue(ret)
        self.assertEqual(my_returns['hostname'],
                         dsrc.metadata['local-hostname'])
    def test_sdc_hostname_if_no_hostname(self):
        my_returns = MOCK_RETURNS.copy()
        my_returns['sdc:hostname'] = 'sdc-' + my_returns['hostname']
        del my_returns['hostname']
        dsrc = self._get_ds(mockdata=my_returns)
        ret = dsrc.get_data()
        self.assertTrue(ret)
        self.assertEqual(my_returns['sdc:hostname'],
                         dsrc.metadata['local-hostname'])
    def test_sdc_uuid_if_no_hostname_or_sdc_hostname(self):
        # With no hostname keys at all, the instance uuid is the fallback.
        my_returns = MOCK_RETURNS.copy()
        del my_returns['hostname']
        dsrc = self._get_ds(mockdata=my_returns)
        ret = dsrc.get_data()
        self.assertTrue(ret)
        self.assertEqual(my_returns['sdc:uuid'],
                         dsrc.metadata['local-hostname'])
    def test_userdata(self):
        dsrc = self._get_ds(mockdata=MOCK_RETURNS)
        ret = dsrc.get_data()
        self.assertTrue(ret)
        self.assertEqual(MOCK_RETURNS['user-data'],
                         dsrc.metadata['legacy-user-data'])
        self.assertEqual(MOCK_RETURNS['cloud-init:user-data'],
                         dsrc.userdata_raw)
    def test_sdc_nics(self):
        dsrc = self._get_ds(mockdata=MOCK_RETURNS)
        ret = dsrc.get_data()
        self.assertTrue(ret)
        self.assertEqual(json.loads(MOCK_RETURNS['sdc:nics']),
                         dsrc.metadata['network-data'])
    def test_sdc_scripts(self):
        dsrc = self._get_ds(mockdata=MOCK_RETURNS)
        ret = dsrc.get_data()
        self.assertTrue(ret)
        self.assertEqual(MOCK_RETURNS['user-script'],
                         dsrc.metadata['user-script'])
        legacy_script_f = "%s/user-script" % self.legacy_user_d
        print("legacy_script_f=%s" % legacy_script_f)
        self.assertTrue(os.path.exists(legacy_script_f))
        self.assertTrue(os.path.islink(legacy_script_f))
        # The legacy user-script must be executable by root only.
        user_script_perm = oct(os.stat(legacy_script_f)[stat.ST_MODE])[-3:]
        self.assertEqual(user_script_perm, '700')
    def test_scripts_shebanged(self):
        dsrc = self._get_ds(mockdata=MOCK_RETURNS)
        ret = dsrc.get_data()
        self.assertTrue(ret)
        self.assertEqual(MOCK_RETURNS['user-script'],
                         dsrc.metadata['user-script'])
        legacy_script_f = "%s/user-script" % self.legacy_user_d
        self.assertTrue(os.path.exists(legacy_script_f))
        self.assertTrue(os.path.islink(legacy_script_f))
        shebang = None
        with open(legacy_script_f, 'r') as f:
            shebang = f.readlines()[0].strip()
        # Plain-text scripts get a bash shebang prepended.
        self.assertEqual(shebang, "#!/bin/bash")
        user_script_perm = oct(os.stat(legacy_script_f)[stat.ST_MODE])[-3:]
        self.assertEqual(user_script_perm, '700')
    def test_scripts_shebang_not_added(self):
        """
        Test that the SmartOS requirement that plain text scripts
        are executable. This test makes sure that plain texts scripts
        with out file magic have it added appropriately by cloud-init.
        """
        my_returns = MOCK_RETURNS.copy()
        my_returns['user-script'] = '\n'.join(['#!/usr/bin/perl',
                                               'print("hi")', ''])
        dsrc = self._get_ds(mockdata=my_returns)
        ret = dsrc.get_data()
        self.assertTrue(ret)
        self.assertEqual(my_returns['user-script'],
                         dsrc.metadata['user-script'])
        legacy_script_f = "%s/user-script" % self.legacy_user_d
        self.assertTrue(os.path.exists(legacy_script_f))
        self.assertTrue(os.path.islink(legacy_script_f))
        shebang = None
        with open(legacy_script_f, 'r') as f:
            shebang = f.readlines()[0].strip()
        self.assertEqual(shebang, "#!/usr/bin/perl")
    def test_userdata_removed(self):
        """
        User-data in the SmartOS world is supposed to be written to a file
        each and every boot. This tests to make sure that in the event the
        legacy user-data is removed, the existing user-data is backed-up
        and there is no /var/db/user-data left.
        """
        user_data_f = "%s/mdata-user-data" % self.legacy_user_d
        with open(user_data_f, 'w') as f:
            f.write("PREVIOUS")
        my_returns = MOCK_RETURNS.copy()
        del my_returns['user-data']
        dsrc = self._get_ds(mockdata=my_returns)
        ret = dsrc.get_data()
        self.assertTrue(ret)
        self.assertFalse(dsrc.metadata.get('legacy-user-data'))
        found_new = False
        for root, _dirs, files in os.walk(self.legacy_user_d):
            for name in files:
                name_f = os.path.join(root, name)
                permissions = oct(os.stat(name_f)[stat.ST_MODE])[-3:]
                if re.match(r'.*\/mdata-user-data$', name_f):
                    found_new = True
                    print(name_f)
                    self.assertEqual(permissions, '400')
        self.assertFalse(found_new)
    def test_vendor_data_not_default(self):
        dsrc = self._get_ds(mockdata=MOCK_RETURNS)
        ret = dsrc.get_data()
        self.assertTrue(ret)
        self.assertEqual(MOCK_RETURNS['sdc:vendor-data'],
                         dsrc.metadata['vendor-data'])
    def test_default_vendor_data(self):
        my_returns = MOCK_RETURNS.copy()
        def_op_script = my_returns['sdc:vendor-data']
        del my_returns['sdc:vendor-data']
        dsrc = self._get_ds(mockdata=my_returns)
        ret = dsrc.get_data()
        self.assertTrue(ret)
        self.assertNotEqual(def_op_script, dsrc.metadata['vendor-data'])
        # we expect default vendor-data is a boothook
        self.assertTrue(dsrc.vendordata_raw.startswith("#cloud-boothook"))
    def test_disable_iptables_flag(self):
        dsrc = self._get_ds(mockdata=MOCK_RETURNS)
        ret = dsrc.get_data()
        self.assertTrue(ret)
        self.assertEqual(MOCK_RETURNS['disable_iptables_flag'],
                         dsrc.metadata['iptables_disable'])
    def test_motd_sys_info(self):
        dsrc = self._get_ds(mockdata=MOCK_RETURNS)
        ret = dsrc.get_data()
        self.assertTrue(ret)
        self.assertEqual(MOCK_RETURNS['enable_motd_sys_info'],
                         dsrc.metadata['motd_sys_info'])
    def test_default_ephemeral(self):
        # Test to make sure that the builtin config has the ephemeral
        # configuration.
        dsrc = self._get_ds()
        cfg = dsrc.get_config_obj()
        ret = dsrc.get_data()
        self.assertTrue(ret)
        assert 'disk_setup' in cfg
        assert 'fs_setup' in cfg
        self.assertIsInstance(cfg['disk_setup'], dict)
        self.assertIsInstance(cfg['fs_setup'], list)
    def test_override_disk_aliases(self):
        # Test to make sure that the built-in DS is overriden
        builtin = DataSourceSmartOS.BUILTIN_DS_CONFIG
        mydscfg = {'disk_aliases': {'FOO': '/dev/bar'}}
        # expect that these values are in builtin, or this is pointless
        for k in mydscfg:
            self.assertIn(k, builtin)
        dsrc = self._get_ds(ds_cfg=mydscfg)
        ret = dsrc.get_data()
        self.assertTrue(ret)
        self.assertEqual(mydscfg['disk_aliases']['FOO'],
                         dsrc.ds_cfg['disk_aliases']['FOO'])
        self.assertEqual(dsrc.device_name_to_device('FOO'),
                         mydscfg['disk_aliases']['FOO'])
    def test_reconfig_network_on_boot(self):
        # Test to ensure that network is configured from metadata on each boot
        dsrc = self._get_ds(mockdata=MOCK_RETURNS)
        self.assertSetEqual(set([EventType.BOOT_NEW_INSTANCE, EventType.BOOT]),
                            dsrc.update_events['network'])
class TestIdentifyFile(CiTestCase):
    """Test the 'identify_file' utility."""
    @skipIf(not which("file"), "command 'file' not available.")
    def test_file_happy_path(self):
        """Test file is available and functional on plain text."""
        fname = self.tmp_path("myfile")
        write_file(fname, "plain text content here\n")
        with self.allow_subp(["file"]):
            self.assertEqual("text/plain", identify_file(fname))
    @mock.patch(DSMOS + ".util.subp")
    def test_returns_none_on_error(self, m_subp):
        """On 'file' execution error, None should be returned."""
        m_subp.side_effect = ProcessExecutionError("FILE_FAILED", exit_code=99)
        fname = self.tmp_path("myfile")
        write_file(fname, "plain text content here\n")
        self.assertEqual(None, identify_file(fname))
        # identify_file must invoke `file --brief --mime-type <fname>`.
        self.assertEqual(
            [mock.call(["file", "--brief", "--mime-type", fname])],
            m_subp.call_args_list)
class ShortReader(object):
    """Implements a 'read' interface for bytes provided.
    much like io.BytesIO but the 'endbyte' acts as if EOF.
    When it is reached a short will be returned."""
    def __init__(self, initial_bytes, endbyte=b'\0'):
        self.data = initial_bytes
        self.index = 0
        self.len = len(self.data)
        self.endbyte = endbyte
    @property
    def emptied(self):
        # True once every byte (including terminators) has been consumed.
        return self.index >= self.len
    def read(self, size=-1):
        """Read size bytes but not past a null.

        The terminating *endbyte* is consumed but stripped from the
        returned bytes, producing a short read.
        """
        if size == 0 or self.index >= self.len:
            return b''
        rsize = size
        if size < 0 or size + self.index > self.len:
            rsize = self.len - self.index
        # Fixed: bytes.find takes an absolute *end offset*, not a length.
        # Passing `rsize` here meant terminators were missed whenever
        # self.index exceeded the remaining window size.
        next_null = self.data.find(self.endbyte, self.index,
                                   self.index + rsize)
        if next_null >= 0:
            rsize = next_null - self.index + 1
        i = self.index
        self.index += rsize
        ret = self.data[i:i + rsize]
        if len(ret) and ret[-1:] == self.endbyte:
            ret = ret[:-1]
        return ret
class TestJoyentMetadataClient(FilesystemMockingTestCase):
    """Tests for the Joyent V2 metadata protocol client.

    The serial port is a MagicMock; setUp wires its read() to replay a
    canned V2 response assembled from self.response_parts, which
    individual tests mutate to simulate errors.
    """
    # Canned raw protocol responses used by the negotiate/flush tests.
    invalid = b'invalid command\n'
    failure = b'FAILURE\n'
    v2_ok = b'V2_OK\n'
    def setUp(self):
        super(TestJoyentMetadataClient, self).setUp()
        self.serial = mock.MagicMock(spec=serial.Serial)
        self.request_id = 0xabcdef12
        self.metadata_value = 'value'
        # Parts of the wire response; tests tweak these to inject faults.
        self.response_parts = {
            'command': 'SUCCESS',
            'crc': 'b5a9ff00',
            'length': SUCCESS_LEN + len(b64e(self.metadata_value)),
            'payload': b64e(self.metadata_value),
            'request_id': '{0:08x}'.format(self.request_id),
        }
        def make_response():
            # Build the full "V2 <len> <crc> <reqid> <cmd> [payload]\n"
            # line from the (possibly mutated) response_parts.
            payloadstr = ''
            if 'payload' in self.response_parts:
                payloadstr = ' {0}'.format(self.response_parts['payload'])
            return ('V2 {length} {crc} {request_id} '
                    '{command}{payloadstr}\n'.format(
                        payloadstr=payloadstr,
                        **self.response_parts).encode('ascii'))
        self.metasource_data = None
        def read_response(length):
            # Lazily render the response on first read, then hand out
            # at most 'length' bytes per call, like a real serial port.
            if not self.metasource_data:
                self.metasource_data = make_response()
                self.metasource_data_len = len(self.metasource_data)
            resp = self.metasource_data[:length]
            self.metasource_data = self.metasource_data[length:]
            return resp
        self.serial.read.side_effect = read_response
        # Pin the random request id so written lines are deterministic.
        self.patched_funcs.enter_context(
            mock.patch('cloudinit.sources.DataSourceSmartOS.random.randint',
                       mock.Mock(return_value=self.request_id)))
    def _get_client(self):
        # Plain metadata client talking to the mocked fp.
        return DataSourceSmartOS.JoyentMetadataClient(
            fp=self.serial, smartos_type=DataSourceSmartOS.SMARTOS_ENV_KVM)
    def _get_serial_client(self):
        # Serial-flavored client (adds open_transport/flush behavior).
        self.serial.timeout = 1
        return DataSourceSmartOS.JoyentMetadataSerialClient(None,
                                                            fp=self.serial)
    def assertEndsWith(self, haystack, prefix):
        self.assertTrue(haystack.endswith(prefix),
                        "{0} does not end with '{1}'".format(
                            repr(haystack), prefix))
    def assertStartsWith(self, haystack, prefix):
        self.assertTrue(haystack.startswith(prefix),
                        "{0} does not start with '{1}'".format(
                            repr(haystack), prefix))
    def assertNoMoreSideEffects(self, obj):
        self.assertRaises(StopIteration, obj)
    def test_get_metadata_writes_a_single_line(self):
        client = self._get_client()
        client.get('some_key')
        self.assertEqual(1, self.serial.write.call_count)
        written_line = self.serial.write.call_args[0][0]
        self.assertEndsWith(written_line.decode('ascii'),
                            b'\n'.decode('ascii'))
        self.assertEqual(1, written_line.count(b'\n'))
    def _get_written_line(self, key='some_key'):
        # Perform one get() and return the raw request line written.
        client = self._get_client()
        client.get(key)
        return self.serial.write.call_args[0][0]
    def test_get_metadata_writes_bytes(self):
        self.assertIsInstance(self._get_written_line(), six.binary_type)
    def test_get_metadata_line_starts_with_v2(self):
        foo = self._get_written_line()
        self.assertStartsWith(foo.decode('ascii'), b'V2'.decode('ascii'))
    def test_get_metadata_uses_get_command(self):
        # Request line layout: V2 <len> <crc> <reqid> <cmd> <arg...>
        parts = self._get_written_line().decode('ascii').strip().split(' ')
        self.assertEqual('GET', parts[4])
    def test_get_metadata_base64_encodes_argument(self):
        key = 'my_key'
        parts = self._get_written_line(key).decode('ascii').strip().split(' ')
        self.assertEqual(b64e(key), parts[5])
    def test_get_metadata_calculates_length_correctly(self):
        parts = self._get_written_line().decode('ascii').strip().split(' ')
        # Declared length covers everything from the request id onwards.
        expected_length = len(' '.join(parts[3:]))
        self.assertEqual(expected_length, int(parts[1]))
    def test_get_metadata_uses_appropriate_request_id(self):
        parts = self._get_written_line().decode('ascii').strip().split(' ')
        request_id = parts[3]
        # Request id is 8 lowercase hex digits.
        self.assertEqual(8, len(request_id))
        self.assertEqual(request_id, request_id.lower())
    def test_get_metadata_uses_random_number_for_request_id(self):
        line = self._get_written_line()
        request_id = line.decode('ascii').strip().split(' ')[3]
        self.assertEqual('{0:08x}'.format(self.request_id), request_id)
    def test_get_metadata_checksums_correctly(self):
        parts = self._get_written_line().decode('ascii').strip().split(' ')
        # CRC32 of the id+command+arg portion, masked to 32 bits.
        expected_checksum = '{0:08x}'.format(
            crc32(' '.join(parts[3:]).encode('utf-8')) & 0xffffffff)
        checksum = parts[2]
        self.assertEqual(expected_checksum, checksum)
    def test_get_metadata_reads_a_line(self):
        client = self._get_client()
        client.get('some_key')
        # The client reads byte-by-byte, so read() is called once per byte.
        self.assertEqual(self.metasource_data_len, self.serial.read.call_count)
    def test_get_metadata_returns_valid_value(self):
        client = self._get_client()
        value = client.get('some_key')
        self.assertEqual(self.metadata_value, value)
    def test_get_metadata_throws_exception_for_incorrect_length(self):
        self.response_parts['length'] = 0
        client = self._get_client()
        self.assertRaises(DataSourceSmartOS.JoyentMetadataFetchException,
                          client.get, 'some_key')
    def test_get_metadata_throws_exception_for_incorrect_crc(self):
        self.response_parts['crc'] = 'deadbeef'
        client = self._get_client()
        self.assertRaises(DataSourceSmartOS.JoyentMetadataFetchException,
                          client.get, 'some_key')
    def test_get_metadata_throws_exception_for_request_id_mismatch(self):
        self.response_parts['request_id'] = 'deadbeef'
        client = self._get_client()
        # Bypass crc validation so the request-id check is what fires.
        client._checksum = lambda _: self.response_parts['crc']
        self.assertRaises(DataSourceSmartOS.JoyentMetadataFetchException,
                          client.get, 'some_key')
    def test_get_metadata_returns_None_if_value_not_found(self):
        self.response_parts['payload'] = ''
        self.response_parts['command'] = 'NOTFOUND'
        self.response_parts['length'] = NOTFOUND_LEN
        client = self._get_client()
        client._checksum = lambda _: self.response_parts['crc']
        self.assertIsNone(client.get('some_key'))
    def test_negotiate(self):
        client = self._get_client()
        reader = ShortReader(self.v2_ok)
        client.fp.read.side_effect = reader.read
        client._negotiate()
        self.assertTrue(reader.emptied)
    def test_negotiate_short_response(self):
        client = self._get_client()
        # chopped '\n' from v2_ok.
        reader = ShortReader(self.v2_ok[:-1] + b'\0')
        client.fp.read.side_effect = reader.read
        self.assertRaises(DataSourceSmartOS.JoyentMetadataTimeoutException,
                          client._negotiate)
        self.assertTrue(reader.emptied)
    def test_negotiate_bad_response(self):
        client = self._get_client()
        reader = ShortReader(b'garbage\n' + self.v2_ok)
        client.fp.read.side_effect = reader.read
        self.assertRaises(DataSourceSmartOS.JoyentMetadataFetchException,
                          client._negotiate)
        # The valid V2_OK line must still be left unread in the stream.
        self.assertEqual(self.v2_ok, client.fp.read())
    def test_serial_open_transport(self):
        client = self._get_serial_client()
        # Garbage up to a timeout (\0), then an invalid command, then OK.
        reader = ShortReader(b'garbage\0' + self.invalid + self.v2_ok)
        client.fp.read.side_effect = reader.read
        client.open_transport()
        self.assertTrue(reader.emptied)
    def test_flush_failure(self):
        client = self._get_serial_client()
        reader = ShortReader(b'garbage' + b'\0' + self.failure +
                             self.invalid + self.v2_ok)
        client.fp.read.side_effect = reader.read
        client.open_transport()
        self.assertTrue(reader.emptied)
    def test_flush_many_timeouts(self):
        client = self._get_serial_client()
        # 100 consecutive timeouts must not prevent eventual negotiation.
        reader = ShortReader(b'\0' * 100 + self.invalid + self.v2_ok)
        client.fp.read.side_effect = reader.read
        client.open_transport()
        self.assertTrue(reader.emptied)
    def test_list_metadata_returns_list(self):
        parts = ['foo', 'bar']
        value = b64e('\n'.join(parts))
        self.response_parts['payload'] = value
        self.response_parts['crc'] = '40873553'
        self.response_parts['length'] = SUCCESS_LEN + len(value)
        client = self._get_client()
        self.assertEqual(client.list(), parts)
    def test_list_metadata_returns_empty_list_if_no_customer_metadata(self):
        del self.response_parts['payload']
        self.response_parts['length'] = SUCCESS_LEN - 1
        self.response_parts['crc'] = '14e563ba'
        client = self._get_client()
        self.assertEqual(client.list(), [])
class TestNetworkConversion(CiTestCase):
    """Tests for convert_net().

    Each test feeds a canned SDC 'nics' structure (SDC_NICS*) through
    convert_net and compares against the expected cloud-init network
    config (version 1) dict.
    """
    def test_convert_simple(self):
        expected = {
            'version': 1,
            'config': [
                {'name': 'net0', 'type': 'physical',
                 'subnets': [{'type': 'static', 'gateway': '8.12.42.1',
                              'address': '8.12.42.102/24'}],
                 'mtu': 1500, 'mac_address': '90:b8:d0:f5:e4:f5'},
                {'name': 'net1', 'type': 'physical',
                 'subnets': [{'type': 'static',
                              'address': '192.168.128.93/22'}],
                 'mtu': 8500, 'mac_address': '90:b8:d0:a5:ff:cd'}]}
        found = convert_net(SDC_NICS)
        self.assertEqual(expected, found)
    def test_convert_simple_alt(self):
        expected = {
            'version': 1,
            'config': [
                {'name': 'net0', 'type': 'physical',
                 'subnets': [{'type': 'static', 'gateway': '8.12.42.1',
                              'address': '8.12.42.51/24'}],
                 'mtu': 1500, 'mac_address': '90:b8:d0:ae:64:51'},
                {'name': 'net1', 'type': 'physical',
                 'subnets': [{'type': 'static',
                              'address': '10.210.1.217/24'}],
                 'mtu': 1500, 'mac_address': '90:b8:d0:bd:4f:9c'}]}
        found = convert_net(SDC_NICS_ALT)
        self.assertEqual(expected, found)
    def test_convert_simple_dhcp(self):
        # A nic flagged for dhcp becomes a bare 'dhcp4' subnet.
        expected = {
            'version': 1,
            'config': [
                {'name': 'net0', 'type': 'physical',
                 'subnets': [{'type': 'static', 'gateway': '8.12.42.1',
                              'address': '8.12.42.51/24'}],
                 'mtu': 1500, 'mac_address': '90:b8:d0:ae:64:51'},
                {'name': 'net1', 'type': 'physical',
                 'subnets': [{'type': 'dhcp4'}],
                 'mtu': 1500, 'mac_address': '90:b8:d0:bd:4f:9c'}]}
        found = convert_net(SDC_NICS_DHCP)
        self.assertEqual(expected, found)
    def test_convert_simple_multi_ip(self):
        # Multiple addresses on one nic become multiple static subnets.
        expected = {
            'version': 1,
            'config': [
                {'name': 'net0', 'type': 'physical',
                 'subnets': [{'type': 'static', 'gateway': '8.12.42.1',
                              'address': '8.12.42.51/24'},
                             {'type': 'static',
                              'address': '8.12.42.52/24'}],
                 'mtu': 1500, 'mac_address': '90:b8:d0:ae:64:51'},
                {'name': 'net1', 'type': 'physical',
                 'subnets': [{'type': 'static',
                              'address': '10.210.1.217/24'},
                             {'type': 'static',
                              'address': '10.210.1.151/24'}],
                 'mtu': 1500, 'mac_address': '90:b8:d0:bd:4f:9c'}]}
        found = convert_net(SDC_NICS_MIP)
        self.assertEqual(expected, found)
    def test_convert_with_dns(self):
        # DNS servers/search domain yield an extra 'nameserver' entry.
        expected = {
            'version': 1,
            'config': [
                {'name': 'net0', 'type': 'physical',
                 'subnets': [{'type': 'static', 'gateway': '8.12.42.1',
                              'address': '8.12.42.51/24'}],
                 'mtu': 1500, 'mac_address': '90:b8:d0:ae:64:51'},
                {'name': 'net1', 'type': 'physical',
                 'subnets': [{'type': 'dhcp4'}],
                 'mtu': 1500, 'mac_address': '90:b8:d0:bd:4f:9c'},
                {'type': 'nameserver',
                 'address': ['8.8.8.8', '8.8.8.1'], 'search': ["local"]}]}
        found = convert_net(
            network_data=SDC_NICS_DHCP, dns_servers=['8.8.8.8', '8.8.8.1'],
            dns_domain="local")
        self.assertEqual(expected, found)
    def test_convert_simple_multi_ipv6(self):
        expected = {
            'version': 1,
            'config': [
                {'name': 'net0', 'type': 'physical',
                 'subnets': [{'type': 'static', 'address':
                              '2001:4800:78ff:1b:be76:4eff:fe06:96b3/64'},
                             {'type': 'static', 'gateway': '8.12.42.1',
                              'address': '8.12.42.51/24'}],
                 'mtu': 1500, 'mac_address': '90:b8:d0:ae:64:51'},
                {'name': 'net1', 'type': 'physical',
                 'subnets': [{'type': 'static',
                              'address': '10.210.1.217/24'}],
                 'mtu': 1500, 'mac_address': '90:b8:d0:bd:4f:9c'}]}
        found = convert_net(SDC_NICS_MIP_IPV6)
        self.assertEqual(expected, found)
    def test_convert_simple_both_ipv4_ipv6(self):
        expected = {
            'version': 1,
            'config': [
                {'mac_address': '90:b8:d0:ae:64:51', 'mtu': 1500,
                 'name': 'net0', 'type': 'physical',
                 'subnets': [{'address': '2001::10/64', 'gateway': '2001::1',
                              'type': 'static'},
                             {'address': '8.12.42.51/24',
                              'gateway': '8.12.42.1',
                              'type': 'static'},
                             {'address': '2001::11/64', 'type': 'static'},
                             {'address': '8.12.42.52/32', 'type': 'static'}]},
                {'mac_address': '90:b8:d0:bd:4f:9c', 'mtu': 1500,
                 'name': 'net1', 'type': 'physical',
                 'subnets': [{'address': '10.210.1.217/24',
                              'type': 'static'}]}]}
        found = convert_net(SDC_NICS_IPV4_IPV6)
        self.assertEqual(expected, found)
    def test_gateways_not_on_all_nics(self):
        # Only nics that declare a gateway get one in the output.
        expected = {
            'version': 1,
            'config': [
                {'mac_address': '90:b8:d0:d8:82:b4', 'mtu': 1500,
                 'name': 'net0', 'type': 'physical',
                 'subnets': [{'address': '8.12.42.26/24',
                              'gateway': '8.12.42.1', 'type': 'static'}]},
                {'mac_address': '90:b8:d0:0a:51:31', 'mtu': 1500,
                 'name': 'net1', 'type': 'physical',
                 'subnets': [{'address': '10.210.1.27/24',
                              'type': 'static'}]}]}
        found = convert_net(SDC_NICS_SINGLE_GATEWAY)
        self.assertEqual(expected, found)
    def test_routes_on_all_nics(self):
        # Explicit routes are replicated onto every nic's subnets.
        routes = [
            {'linklocal': False, 'dst': '3.0.0.0/8', 'gateway': '8.12.42.3'},
            {'linklocal': False, 'dst': '4.0.0.0/8', 'gateway': '10.210.1.4'}]
        expected = {
            'version': 1,
            'config': [
                {'mac_address': '90:b8:d0:d8:82:b4', 'mtu': 1500,
                 'name': 'net0', 'type': 'physical',
                 'subnets': [{'address': '8.12.42.26/24',
                              'gateway': '8.12.42.1', 'type': 'static',
                              'routes': [{'network': '3.0.0.0/8',
                                          'gateway': '8.12.42.3'},
                                         {'network': '4.0.0.0/8',
                                          'gateway': '10.210.1.4'}]}]},
                {'mac_address': '90:b8:d0:0a:51:31', 'mtu': 1500,
                 'name': 'net1', 'type': 'physical',
                 'subnets': [{'address': '10.210.1.27/24', 'type': 'static',
                              'routes': [{'network': '3.0.0.0/8',
                                          'gateway': '8.12.42.3'},
                                         {'network': '4.0.0.0/8',
                                          'gateway': '10.210.1.4'}]}]}]}
        found = convert_net(SDC_NICS_SINGLE_GATEWAY, routes=routes)
        self.maxDiff = None
        self.assertEqual(expected, found)
@unittest2.skipUnless(get_smartos_environ() == SMARTOS_ENV_KVM,
                      "Only supported on KVM and bhyve guests under SmartOS")
@unittest2.skipUnless(os.access(SERIAL_DEVICE, os.W_OK),
                      "Requires write access to " + SERIAL_DEVICE)
@unittest2.skipUnless(HAS_PYSERIAL is True, "pyserial not available")
class TestSerialConcurrency(CiTestCase):
    """
    This class tests locking on an actual serial port, and as such can only
    be run in a kvm or bhyve guest running on a SmartOS host. A test run on
    a metadata socket will not be valid because a metadata socket ensures
    there is only one session over a connection. In contrast, in the
    absence of proper locking multiple processes opening the same serial
    port can corrupt each others' exchanges with the metadata server.
    This takes on the order of 2 to 3 minutes to run.
    """
    allowed_subp = ['mdata-get']
    def setUp(self):
        # Spawn a competing process that hammers the metadata port for
        # the whole duration of the test.
        self.mdata_proc = multiprocessing.Process(target=self.start_mdata_loop)
        self.mdata_proc.start()
        super(TestSerialConcurrency, self).setUp()
    def tearDown(self):
        # os.kill() rather than mdata_proc.terminate() to avoid console spam.
        os.kill(self.mdata_proc.pid, signal.SIGKILL)
        self.mdata_proc.join()
        super(TestSerialConcurrency, self).tearDown()
    def start_mdata_loop(self):
        """
        The mdata-get command is repeatedly run in a separate process so
        that it may try to race with metadata operations performed in the
        main test process. Use of mdata-get is better than two processes
        using the protocol implementation in DataSourceSmartOS because we
        are testing to be sure that cloud-init and mdata-get respect each
        others locks.
        """
        # Accept any exit code: only the traffic matters, not success.
        rcs = list(range(0, 256))
        while True:
            subp(['mdata-get', 'sdc:routes'], rcs=rcs)
    def test_all_keys(self):
        self.assertIsNotNone(self.mdata_proc.pid)
        ds = DataSourceSmartOS
        keys = [tup[0] for tup in ds.SMARTOS_ATTRIB_MAP.values()]
        keys.extend(ds.SMARTOS_ATTRIB_JSON.values())
        client = ds.jmc_client_factory(smartos_type=SMARTOS_ENV_KVM)
        self.assertIsNotNone(client)
        # The behavior that we are testing for was observed mdata-get running
        # 10 times at roughly the same time as cloud-init fetched each key
        # once. cloud-init would regularly see failures before making it
        # through all keys once.
        for _ in range(0, 3):
            for key in keys:
                # We don't care about the return value, just that it doesn't
                # thrown any exceptions.
                client.get(key)
        # The competing process must still be alive (it never exits on
        # its own); a set exitcode would mean it crashed mid-test.
        self.assertIsNone(self.mdata_proc.exitcode)
|
import os
import logging
import tornado.options as opt
from motherbrain.base import conf
from motherbrain.base.conf import get_config
# Site-wide configuration read from the current working directory; the
# remaining constants below are convenience lookups with fallbacks.
SITE_CONF = conf.site_conf(os.getcwd())
DATADIR = SITE_CONF.get('env.motherbrain_data', '/tmp')
API_URL = SITE_CONF.get('env.api_url')
WEBCLIENT_URL = SITE_CONF.get('env.webclient_url')
# HTTP server option definitions (name -> tornado option kwargs).
_srv_opts = {
    'config': {'default': 'api_server.cfg',
               'help': 'Configuration File'},
    'port': {'default': 8888, 'type': int,
             'help': 'Tornado Port'},
    'debug': {'default': True, 'type': bool},
    # NOTE(review): 'http://urli.st' appears twice in this default list;
    # one entry is presumably redundant or was meant to be another host --
    # confirm before changing.
    'cors_hosts': {'default': '''http://api.urli.st '''
                              '''http://urli.st '''
                              '''http://localhost:9999 '''
                              '''http://next.urli.st '''
                              '''http://urli.st '''
                              '''http://next.api.urli.st''',
                   'help': 'Hosts allowed to perform Cross Domain Request'},
    'media_path': {'default': os.path.join(DATADIR, 'urlist_media')},
    'media_url': {'default': '/media'},
    'static_url': {'default': 'http://static.urli.st'},
    'base_url': {'default': API_URL},
    'webclient_url': {'default': WEBCLIENT_URL}
}
# Dispatcher/worker options for the motherbrain backend cluster.
_motherbrain_opts = {
    'dispatcher_classname': {'default': 'MBDispatcherCluster',
                             'help': 'Motherbrain dispatcher class'},
    'addresses': {'default': '''tcp://localhost:5555 tcp://localhost:5556 '''
                             '''tcp://localhost:5557 tcp://localhost:5558''',
                  'help': 'A space separated list of addresses'},
    'datadir': {'default': os.path.join(DATADIR, 'motherbrain_data')}
}
# OAuth/session secrets; 'XXX' placeholders are expected to be overridden
# by the configuration file at runtime.
_oauth_opts = {
    'cookie_secret': {'default': 'XXX'},
    'cookie_domain': {'default': SITE_CONF.get('oauth.cookie_domain')},
    'facebook_secret': {'default': 'XXX'},
    'facebook_api_key': {'default': 'XXX'},
    'facebook_redirect_uri': {'default': '{}/login/facebook'.format(API_URL)},
    'twitter_consumer_key': {'default': 'XXX'},
    'twitter_consumer_secret': {'default': 'XXX'},
    'urlist_salt': {'default': 'XXX'}
}
# MongoDB connection options.
_db_opts = {
    'dbname': {'default': 'urlist'},
    'dbhost': {'default': 'mongo1'},
    'dbport': {'default': 27017, 'type': int},
    'dbusr': {'default': ''},
    'dbpwd': {'default': ''},
}
# Snapshot of tornado's own option defaults.
# NOTE: dict.iteritems() is Python 2 only; this module cannot run on
# Python 3 as written.
_tornado_opts = {k: v.value() for k, v in opt.options.iteritems()}
# Option sections merged into the final config object.
_options = {'server': _srv_opts,
            'database': _db_opts,
            'tornado': _tornado_opts,
            'oauth': _oauth_opts,
            'motherbrain': _motherbrain_opts}
# Only these options may be overridden from the command line.
_cli_args = {'server': ['port', 'debug', 'config'],
             'motherbrain': ['datadir']}
config = get_config(_options, _cli_args)
if SITE_CONF:
    logging.info('CONF::SITE --- Read')
|
import imp
from flask.config import ConfigAttribute, Config as ConfigBase # noqa
class Config(ConfigBase):
    "Configuration without the root_path"
    def __init__(self, defaults=None):
        # Deliberately skip ConfigBase.__init__ (which requires a
        # root_path); initialize as a plain dict instead.
        dict.__init__(self, defaults or {})
    def from_pyfile(self, filename):
        """
        Updates the values in the config from a Python file. This function
        behaves as if the file was imported as module with the
        :meth:`from_object` function.
        :param filename: the filename of the config. This can either be an
                         absolute filename or a filename relative to the
                         root path.
        """
        # Execute the config file inside a throwaway module namespace,
        # then harvest its upper-case attributes via from_object().
        # NOTE: execfile and `except IOError, e` are Python 2-only syntax;
        # this module cannot run under Python 3 as written.
        d = imp.new_module('config')
        d.__file__ = filename
        try:
            execfile(filename, d.__dict__)
        except IOError, e:
            e.strerror = 'Unable to load configuration file (%s)' % e.strerror
            raise
        self.from_object(d)
|
""" Here, we need some documentation...
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
import os
import types
import threading
import time
import six
from DIRAC import gLogger
from DIRAC.Core.Utilities.DIRACSingleton import DIRACSingleton
@six.add_metaclass(DIRACSingleton)
class Devloader(object):
  """Process-wide (singleton) development auto-reloader.

  A background thread polls the source file of every imported module,
  plus any explicitly watched files, and re-executes the current process
  when one of them changes on disk.
  """
  def __init__(self):
    self.__log = gLogger.getSubLogger("Devloader")
    self.__reloaded = False    # set once a restart has been triggered
    self.__enabled = True
    self.__reloadTask = False  # watcher thread once bootstrap() ran
    self.__stuffToClose = []   # objects to close() before re-exec
    self.__watchedFiles = []   # extra non-module files to poll
    self.__modifyTimes = {}    # path -> last observed st_mtime
  def addStuffToClose(self, stuff):
    # Register an object whose close() must run before the process
    # replaces itself (sockets, files, servers...).
    self.__stuffToClose.append(stuff)
  @property
  def enabled(self):
    return self.__enabled
  def watchFile(self, fp):
    # Add an arbitrary file to the poll list; False if not a file.
    if os.path.isfile(fp):
      self.__watchedFiles.append(fp)
      return True
    return False
  def __restart(self):
    self.__reloaded = True
    # Close registered resources so they are not leaked across execl().
    for stuff in self.__stuffToClose:
      try:
        self.__log.always("Closing %s" % stuff)
        sys.stdout.flush()
        stuff.close()
      except Exception:
        gLogger.exception("Could not close %s" % stuff)
    # Replace this process with a fresh interpreter running the same
    # command line (state is intentionally lost).
    python = sys.executable
    os.execl(python, python, * sys.argv)
  def bootstrap(self):
    # Start the background watcher thread; idempotent.
    if not self.__enabled:
      return False
    if self.__reloadTask:
      return True
    self.__reloadTask = threading.Thread(target=self.__reloadOnUpdate)
    self.__reloadTask.setDaemon(1)
    self.__reloadTask.start()
  def __reloadOnUpdate(self):
    # Poll once per second until a restart has been triggered.
    while True:
      time.sleep(1)
      if self.__reloaded:
        return
      for modName in sys.modules:
        modObj = sys.modules[modName]
        if not isinstance(modObj, types.ModuleType):
          continue
        path = getattr(modObj, "__file__", None)
        if not path:
          continue
        # Map compiled artifacts back to their .py source.
        if path.endswith(".pyc") or path.endswith(".pyo"):
          path = path[:-1]
        self.__checkFile(path)
      for path in self.__watchedFiles:
        self.__checkFile(path)
  def __checkFile(self, path):
    # Stat may fail for e.g. zipimported or deleted files: ignore.
    try:
      modified = os.stat(path).st_mtime
    except Exception:
      return
    # First sighting just records the timestamp; later changes restart.
    if path not in self.__modifyTimes:
      self.__modifyTimes[path] = modified
      return
    if self.__modifyTimes[path] != modified:
      self.__log.always("File system changed (%s). Restarting..." % (path))
      self.__restart()
|
import sys
import os
import logging
import threading
try:
# first we try system wide
import treewatcher
except ImportError:
# if it fails, we try it from the project source directory
sys.path.insert(0, os.path.join(os.path.dirname(__file__), os.path.pardir))
import treewatcher
from treewatcher import ThreadedEventsCallbacks, choose_source_tree_monitor
_LOGGER = logging.getLogger('_LOGGER')
_LOGGER.setLevel(logging.INFO)
_LOGGER.addHandler(logging.StreamHandler())
class MonitorCallbacks(ThreadedEventsCallbacks):
    """
    Example callbacks which will output the event and path
    This is a threaded type callbacks object : they will be
    called from a different thread of the monitor.
    We need to use logging here to prevent messy output.
    You need to protect shared state from concurrent access
    using Lock for example
    """
    def _report(self, event_name, path, is_dir):
        """Log a single event with the name of the handling thread."""
        _LOGGER.info("%s: %s %s %s" % (event_name, path, is_dir,
                                       threading.current_thread().name))
    def create(self, path, is_dir):
        """ callback called on a 'IN_CREATE' event """
        self._report("create", path, is_dir)
    def delete(self, path, is_dir):
        """ callback called on a 'IN_DELETE' event """
        self._report("delete", path, is_dir)
    def close_write(self, path, is_dir):
        """ callback called on a 'IN_CLOSE_WRITE' event """
        self._report("close_write", path, is_dir)
    def moved_from(self, path, is_dir):
        """ callback called on a 'IN_MOVED_FROM' event """
        self._report("moved_from", path, is_dir)
    def moved_to(self, path, is_dir):
        """ callback called on a 'IN_MOVED_TO' event """
        self._report("moved_to", path, is_dir)
    def modify(self, path, is_dir):
        """ callback called on a 'IN_MODIFY' event """
        self._report("modify", path, is_dir)
    def attrib(self, path, is_dir):
        """ callback called on a 'IN_ATTRIB' event """
        self._report("attrib", path, is_dir)
    def unmount(self, path, is_dir):
        """ callback called on a 'IN_UNMOUNT' event """
        self._report("unmount", path, is_dir)
# NOTE: the bare `print ...` statements below are Python 2-only syntax;
# this example script cannot run under Python 3 as written.
if __name__ == '__main__':
    # Yeah, command line parsing
    if len(sys.argv) < 2:
        print "usage:", sys.argv[0], "directory"
        sys.exit(1)
    # we check if the provided string is a valid directory
    path_to_watch = sys.argv[1]
    if not os.path.isdir(path_to_watch):
        print path_to_watch, "is not a valid directory."
        sys.exit(2)
    # We instanciate our callbacks object
    callbacks = MonitorCallbacks()
    # we get a source tree monitor
    stm = choose_source_tree_monitor()
    # we set our callbacks
    stm.set_events_callbacks(callbacks)
    # we will use two threads to handle callbacks
    stm.set_workers_number(2)
    # we start the monitor
    stm.start()
    # after that, we can add the directory we want to watch
    stm.add_source_dir(path_to_watch)
    print "Watching directory", path_to_watch
    print "Open a new terminal, and create/remove some folders and files in the", path_to_watch, "directory"
    print "Ctrl-C to exit..."
    try:
        # without specific arguments, the next call will block forever
        # open a terminal, and create/remove some folders and files
        # this will last forever. use Ctrl-C to exit.
        stm.process_events()
        # see monitor-timeout-serial.py for an example with a timeout argument
    except KeyboardInterrupt:
        print "Stopping monitor."
    finally:
        # clean stop
        stm.stop()
|
import sys
import os
import argparse
import zipfile
import tarfile
def make_zipfile(outname, filenames, prefix):
    """Write *filenames* into a ZIP archive at *outname*.

    Each entry is stored under its original path with *prefix* prepended,
    so extraction produces a single top-level folder (no tar bomb).
    """
    with zipfile.ZipFile(outname, "w", zipfile.ZIP_DEFLATED) as archive:
        for name in filenames:
            archive.write(name, prefix + name)
def make_tarfile(outname, filenames, prefix, mode="w"):
    """Write *filenames* into a tar archive at *outname*.

    Each entry is stored with *prefix* prepended to its path. *mode* is a
    tarfile.open() mode string ("w", "w:gz", "w:bz2", "w:xz") selecting
    the compression.

    BUGFIX: the original called
    tarfile.open(outname, "w", zipfile.ZIP_DEFLATED), which ignored the
    *mode* parameter entirely (the gz/bz2/xz wrappers never compressed)
    and passed the zipfile constant as tarfile.open()'s third positional
    argument (fileobj), which is invalid.
    """
    with tarfile.open(outname, mode) as z:
        for filename in filenames:
            z.add(filename, prefix + filename)
def make_tarfile_gz(outname, filenames, prefix):
    """Write a gzip-compressed tar archive (see make_tarfile)."""
    return make_tarfile(outname, filenames, prefix, mode="w:gz")
def make_tarfile_bz2(outname, filenames, prefix):
    """Write a bzip2-compressed tar archive (see make_tarfile).

    BUGFIX: the parameter was named 'foldername' while the body passed the
    undefined name 'prefix', raising NameError on every call. Renamed the
    parameter to 'prefix', matching the other archive writers.
    """
    return make_tarfile(outname, filenames, prefix, mode="w:bz2")
def make_tarfile_xz(outname, filenames, prefix):
    """Write an xz-compressed tar archive (see make_tarfile).

    BUGFIX: the parameter was named 'foldername' while the body passed the
    undefined name 'prefix', raising NameError on every call. Renamed the
    parameter to 'prefix', matching the other archive writers.
    """
    return make_tarfile(outname, filenames, prefix, mode="w:xz")
# Map of archive filename extension -> writer function. Matched in order
# with str.endswith in get_writerfunc, so both short (.tgz) and long
# (.tar.gz) spellings are supported.
formathandlers = [
    (".zip", make_zipfile),
    (".tar", make_tarfile),
    (".tgz", make_tarfile_gz),
    (".tar.gz", make_tarfile_gz),
    (".tbz", make_tarfile_bz2),
    (".tar.bz2", make_tarfile_bz2),
    (".txz", make_tarfile_xz),
    (".tar.xz", make_tarfile_xz),
]
# Help text shown by argparse; the epilog lists the supported extensions.
tophelptext = """
Make a zip or tar archive containing specified files without a tar bomb.
"""
bottomhelptext = """
Supported output formats: """+", ".join(x[0] for x in formathandlers)
def parse_argv(argv):
    """Parse the command line (argv[0] is the program name and is skipped)."""
    parser = argparse.ArgumentParser(
        description=tophelptext, epilog=bottomhelptext
    )
    # (flags, keyword arguments) pairs, registered in order.
    argument_specs = [
        (("filelist",),
         dict(help="name of file containing newline-separated relative "
                   "paths to files to include, or - for standard input")),
        (("foldername",),
         dict(help="name of folder in archive (e.g. hello-1.2.5)")),
        (("-o", "--output"),
         dict(help="path of archive (default: foldername + .zip)")),
    ]
    for flags, kwargs in argument_specs:
        parser.add_argument(*flags, **kwargs)
    return parser.parse_args(argv[1:])
def get_writerfunc(outname):
    """Return the archive writer matching *outname*'s extension.

    The basename is compared case-insensitively against each entry of
    formathandlers in order; raises KeyError when nothing matches.
    """
    basename = os.path.basename(outname).lower()
    for extension, writer in formathandlers:
        if basename.endswith(extension):
            return writer
    raise KeyError(os.path.splitext(basename)[1])
def main(argv=None):
    """Entry point: read the file list, then write the archive."""
    args = parse_argv(argv or sys.argv)
    # Read the newline-separated file list (stdin when given '-').
    if args.filelist == '-':
        raw_lines = set(sys.stdin)
    else:
        with open(args.filelist, "r") as infp:
            raw_lines = set(infp)
    # Strip whitespace, drop blanks and duplicates, and sort.
    stripped = {line.strip() for line in raw_lines}
    filenames = sorted(name for name in stripped if name)
    outname = args.output or args.foldername + ".zip"
    writerfunc = get_writerfunc(outname)
    writerfunc(outname, filenames, args.foldername + "/")
if __name__ == '__main__':
    main()
|
from upqjob import UpqJob
from upqdb import UpqDB
from time import time
import os
import shutil
import requests
class Download(UpqJob):
	"""
	"download url:$url"

	Fetches the job's 'url' into the configured temp directory and
	records the local path in jobdata['file'] for downstream jobs.
	Returns True on success, False on any error.
	"""
	def run(self):
		url=self.jobdata['url']
		filename=os.path.basename(url)
		tmpfile=os.path.join(self.getcfg('temppath', '/tmp'), filename)
		self.jobdata['file']=tmpfile
		self.logger.debug("going to download %s", url)
		try:
			# NOTE(review): verify=False disables TLS certificate
			# validation, so the download can be intercepted/MITMed --
			# confirm this is intentional.
			response = requests.get(url, stream=True, verify=False)
			# Stream directly to disk to avoid buffering the whole
			# file in memory.
			with open(tmpfile, 'wb') as out_file:
				shutil.copyfileobj(response.raw, out_file)
			del response
			self.logger.debug("downloaded to %s", tmpfile)
		except Exception as e:
			# Best effort: log the failure and report it via the
			# return value instead of raising.
			self.logger.error(str(e))
			return False
		return True
|
from bdchecker.api import NewDatabaseTaskChecker
from bdcheckerapp.autograding.zaj5.unit5.utils import Zaj5TaskChecker, UserList
class TaskChecker(NewDatabaseTaskChecker):
    """Grading entry point; echoes the graded task's stdout back."""
    display_stdout = True
class TestSuite(Zaj5TaskChecker):
    """Grading suite for the add_user exercise.

    Checks that the schema defines an add_user procedure, that the
    LIST_USERS view starts empty, that the 'user' role can call
    add_user, and that the created row is stored correctly.
    """
    def test_has_procedure(self):
        # The graded schema must expose a procedure named add_user.
        self.assert_has_procedure("add_user")
    def test_view_is_empty_at_the_beginning(self):
        # NOTE(review): this test uses the module-level UserList while the
        # others use self.UserList -- confirm both refer to the same model.
        self.assertEqual(len(list(self.session.query(UserList.username))), 0,
                         msg="Widok \"LIST_USERS\" powinien być pusty zaraz po stworzeniu schematu")
    def test_user_role_can_add_users(self):
        user = self.get_session("user")
        try:
            user.execute("SELECT add_user('foo', 'bar');")
            user.flush()
        except Exception as e:
            # BUGFIX: corrected message typo "unkcji" -> "funkcji".
            raise AssertionError("Rola \"user\" nie mogła wywołać funkcji add_user") from e
        self.assertEqual(list(user.query(self.UserList.username)), [("foo",)], msg="Po wykonaniu metody add_user nie było użytkownika w bazie danych")
    def test_user_is_created_properly(self):
        self.session.execute("SELECT add_user('foo', 'bar');")
        self.assertEqual(
            list(self.session.query(self.Users.username, self.Users.is_admin)), [("foo", 0)],
            msg="Po stworzeniu użytkownika za pomocą add_user okazało się że nie został on stworzony poprawnie.")
|
import os
import requests
# Canary host that receives the upgrade first, then the pool of MediaWiki
# application servers the rollout proceeds through.
canary = 'mwtask111'
serverlist = ['mw101', 'mw102', 'mw111', 'mw112', 'mw121', 'mw122']
def check_up(server: str) -> bool:
    """Return True when *server* correctly serves meta.miraheze.org.

    Routes the request to the given backend via the debug header, then
    checks the status code, the body and the X-Served-By response header.
    """
    debug_headers = {'X-Miraheze-Debug': f'{server}.miraheze.org'}
    resp = requests.get('https://meta.miraheze.org/w/api.php?action=query&meta=siteinfo&formatversion=2&format=json', headers=debug_headers)
    status_ok = resp.status_code == 200
    body_ok = 'miraheze' in resp.text
    backend_ok = server in resp.headers['X-Served-By']
    return status_ok and body_ok and backend_ok
def check_ro(server: str) -> bool:
    """Return True when the wiki reports read-only mode via *server*."""
    debug_headers = {'X-Miraheze-Debug': f'{server}.miraheze.org'}
    resp = requests.get('https://meta.miraheze.org/w/api.php?action=query&meta=siteinfo&formatversion=2&format=json', headers=debug_headers)
    siteinfo = resp.json()
    return bool(siteinfo['query']['general']['readonly'])
# Interactive upgrade runbook: put MediaWiki in read-only, refresh the
# staging checkout, deploy to the canary, then roll out server by server.
# Each input() call is a manual checkpoint for the operator.
print('Welcome to the MediaWiki Upgrade tool!')
input('Please confirm you are running this script on the canary server: (press enter)')
input('MediaWiki -> RO - Running puppet to sync config')
os.system('sudo puppet agent -tv')
print('Config deployed')
# Verify read-only mode took effect everywhere before touching code.
print('Checking RO on Canary Server')
if not check_ro(canary):
    input('Stopping deploy - RO check failed - Press enter to resume')
for server in serverlist:
    print(f'Confirming RO on {server}')
    if not check_ro(server):
        input(f'RO check failed on {server} - Press enter to resume')
# Rebuild the staging tree from the new branch via puppet.
print('Starting staging update')
input('Press enter when branch updated in puppet: ')
os.system('sudo -u www-data rm -rf /srv/mediawiki-staging/w')
os.system('sudo puppet agent -tv')
print('Will now check mediawiki branch')
os.system('git -C /srv/mediawiki-staging/w rev-parse --abbrev-ref HEAD')
input('Confirm: ')
# Canary first: deploy, then verify it serves and is still read-only.
print('Will now deploy to canary server')
os.system('deploy-mediawiki --world --l10n --force --ignore-time --servers=skip')
if check_up(canary) and check_ro(canary):
    print('Canary deploy done')
else:
    print('Canary is not online')
input('Press enter to rollout: ')
# Roll out to the rest of the pool, verifying each server in turn.
for server in serverlist:
    print(f'Will now deploy to {server}')
    os.system(f'deploy-mediawiki --world --l10n --force --ignore-time --servers={server}')
    if check_up(server) and check_ro(server):
        print(f'{server} deploy done')
    else:
        input(f'{server} is not online - Proceed? ')
print('Deployment done')
# Finally drop read-only mode again via puppet.
input('Please merge RW change and press enter: ')
print('Running puppet')
os.system('sudo puppet agent -tv')
print('Deployment done')
|
import gettext
import os
def get_parent_dir(filepath, level=1):
    '''Get parent dir.

    Resolves *filepath* to its real path, then walks *level* directory
    levels up and returns the result.
    '''
    result = os.path.realpath(filepath)
    for _ in range(level):
        result = os.path.dirname(result)
    return result
# Locale directory: prefer the in-tree "locale" folder two levels above
# this file; fall back to the system-wide location when it is missing.
LOCALE_DIR=os.path.join(get_parent_dir(__file__, 2), "locale")
if not os.path.exists(LOCALE_DIR):
    LOCALE_DIR="/usr/share/locale"
_ = None
# NOTE: `except Exception, e` is Python 2-only syntax; this module cannot
# run under Python 3 as written.
try:
    _ = gettext.translation("deepin-music-player", LOCALE_DIR).gettext
except Exception, e:
    # No catalog available: fall back to the identity "translation".
    _ = lambda i : i
|
"""Module for calling Artist related last.fm web services API methods"""
__author__ = "Abhinav Sarkar <abhinav@abhinavsarkar.net>"
__version__ = "0.2"
__license__ = "GNU Lesser General Public License"
__package__ = "lastfm"
from lastfm.base import LastfmBase
from lastfm.mixin import mixin
from lastfm.decorators import cached_property, top_property
@mixin("crawlable", "shoutable", "sharable",
       "taggable", "searchable", "cacheable", "property_adder")
class Artist(LastfmBase):
    """A class representing an artist."""
    class Meta(object):
        # properties exposed by the property_adder mixin
        properties = ["name", "similar", "top_tags"]
        # properties that trigger a lazy _fill_info() when first accessed
        fillable_properties = ["mbid", "url", "image",
                               "streamable", "stats", "bio"]

    def init(self, api, subject = None, **kwargs):
        """
        Create an Artist object by providing all the data related to it.
        @param api: an instance of L{Api}
        @type api: L{Api}
        @param name: the artist name
        @type name: L{str}
        @param mbid: MBID of the artist
        @type mbid: L{str}
        @param url: URL of the artist on last.fm
        @type url: L{str}
        @param image: the images of the artist in various sizes
        @type image: L{dict}
        @param streamable: flag indicating if the artist is streamable from last.fm
        @type streamable: L{bool}
        @param stats: the artist statistics
        @type stats: L{Stats}
        @param similar: artists similar to the provided artist
        @type similar: L{list} of L{Artist}
        @param top_tags: top tags for the artist
        @type top_tags: L{list} of L{Tag}
        @param bio: biography of the artist
        @type bio: L{Wiki}
        @param subject: the subject to which this instance belongs to
        @type subject: L{User} OR L{Artist} OR L{Tag} OR L{Track} OR L{WeeklyChart}
        @raise InvalidParametersError: If an instance of L{Api} is not provided as the first
                                       parameter then an Exception is raised.
        """
        if not isinstance(api, Api):
            raise InvalidParametersError("api reference must be supplied as an argument")
        self._api = api
        super(Artist, self).init(**kwargs)
        # Re-wrap the raw stats/bio parsed by the superclass (if present) so
        # that their subject points back at this Artist instance.
        self._stats = hasattr(self, "_stats") and Stats(
            subject = self,
            listeners = self._stats.listeners,
            playcount = self._stats.playcount,
            weight = self._stats.weight,
            match = self._stats.match,
            rank = self._stats.rank
        ) or None
        self._bio = hasattr(self, "_bio") and Wiki(
            subject = self,
            published = self._bio.published,
            summary = self._bio.summary,
            content = self._bio.content
        ) or None
        self._subject = subject

    def get_similar(self, limit = None):
        """
        Get the artists similar to this artist.
        @param limit: the number of artists returned (optional)
        @type limit: L{int}
        @return: artists similar to this artist
        @rtype: L{list} of L{Artist}
        """
        params = self._default_params({'method': 'artist.getSimilar'})
        if limit is not None:
            params.update({'limit': limit})
        data = self._api._fetch_data(params).find('similarartists')
        self._similar = [
            Artist(
                self._api,
                subject = self,
                name = a.findtext('name'),
                mbid = a.findtext('mbid'),
                stats = Stats(
                    subject = a.findtext('name'),
                    match = float(a.findtext('match')),
                ),
                url = 'http://' + a.findtext('url'),
                image = {'large': a.findtext('image')}
            )
            for a in data.findall('artist')
        ]
        # return a copy so callers cannot mutate the cached list
        return self._similar[:]

    @property
    def similar(self):
        """
        artists similar to this artist
        @rtype: L{list} of L{Artist}
        """
        # refetch when not cached yet or when the cache holds fewer than 6
        # entries (a previous limited get_similar() call)
        if not hasattr(self, "_similar") or self._similar is None or len(self._similar) < 6:
            return self.get_similar()
        return self._similar[:]

    @top_property("similar")
    def most_similar(self):
        """
        artist most similar to this artist
        @rtype: L{Artist}
        """
        pass

    @property
    def top_tags(self):
        """
        top tags for the artist
        @rtype: L{list} of L{Tag}
        """
        if not hasattr(self, "_top_tags") or self._top_tags is None or len(self._top_tags) < 6:
            params = self._default_params({'method': 'artist.getTopTags'})
            data = self._api._fetch_data(params).find('toptags')
            self._top_tags = [
                Tag(
                    self._api,
                    subject = self,
                    name = t.findtext('name'),
                    url = t.findtext('url')
                )
                for t in data.findall('tag')
            ]
        return self._top_tags[:]

    @top_property("top_tags")
    def top_tag(self):
        """
        top tag for the artist
        @rtype: L{Tag}
        """
        pass

    @cached_property
    def events(self):
        """
        events for the artist
        @rtype: L{lazylist} of L{Event}
        """
        params = self._default_params({'method': 'artist.getEvents'})
        data = self._api._fetch_data(params).find('events')
        return [
            Event.create_from_data(self._api, e)
            for e in data.findall('event')
        ]

    @cached_property
    def top_albums(self):
        """
        top albums of the artist
        @rtype: L{list} of L{Album}
        """
        params = self._default_params({'method': 'artist.getTopAlbums'})
        data = self._api._fetch_data(params).find('topalbums')
        return [
            Album(
                self._api,
                subject = self,
                name = a.findtext('name'),
                artist = self,
                mbid = a.findtext('mbid'),
                url = a.findtext('url'),
                image = dict([(i.get('size'), i.text) for i in a.findall('image')]),
                stats = Stats(
                    subject = a.findtext('name'),
                    playcount = int(a.findtext('playcount')),
                    rank = int(a.attrib['rank'])
                )
            )
            for a in data.findall('album')
        ]

    @top_property("top_albums")
    def top_album(self):
        """
        top album of the artist
        @rtype: L{Album}
        """
        pass

    @cached_property
    def top_fans(self):
        """
        top fans of the artist
        @rtype: L{list} of L{User}
        """
        params = self._default_params({'method': 'artist.getTopFans'})
        data = self._api._fetch_data(params).find('topfans')
        return [
            User(
                self._api,
                subject = self,
                name = u.findtext('name'),
                url = u.findtext('url'),
                image = dict([(i.get('size'), i.text) for i in u.findall('image')]),
                stats = Stats(
                    subject = u.findtext('name'),
                    weight = int(u.findtext('weight'))
                )
            )
            for u in data.findall('user')
        ]

    @top_property("top_fans")
    def top_fan(self):
        """
        top fan of the artist
        @rtype: L{User}"""
        pass

    @cached_property
    def top_tracks(self):
        """
        top tracks of the artist
        @rtype: L{list} of L{Track}
        """
        params = self._default_params({'method': 'artist.getTopTracks'})
        data = self._api._fetch_data(params).find('toptracks')
        return [
            Track(
                self._api,
                subject = self,
                name = t.findtext('name'),
                artist = self,
                mbid = t.findtext('mbid'),
                stats = Stats(
                    subject = t.findtext('name'),
                    playcount = int(t.findtext('playcount')),
                    rank = int(t.attrib['rank'])
                ),
                streamable = (t.findtext('streamable') == '1'),
                full_track = (t.find('streamable').attrib['fulltrack'] == '1'),
                image = dict([(i.get('size'), i.text) for i in t.findall('image')]),
            )
            for t in data.findall('track')
        ]

    @top_property("top_tracks")
    def top_track(self):
        """
        topmost track of the artist
        @rtype: L{Track}
        """
        pass

    @staticmethod
    def get_info(api, artist = None, mbid = None):
        """
        Get the data for the artist.
        @param api: an instance of L{Api}
        @type api: L{Api}
        @param artist: the name of the artist
        @type artist: L{str}
        @param mbid: MBID of the artist
        @type mbid: L{str}
        @return: an Artist object corresponding the provided artist name
        @rtype: L{Artist}
        @raise lastfm.InvalidParametersError: Either artist or mbid parameter has to
                                              be provided. Otherwise exception is raised.
        @note: Use the L{Api.get_artist} method instead of using this method directly.
        """
        data = Artist._fetch_data(api, artist, mbid)
        a = Artist(api, name = data.findtext('name'))
        a._fill_info()
        return a

    @staticmethod
    def _get_all(seed_artist):
        # seed descriptor used by the crawlable mixin
        return (seed_artist, ['name'],
                lambda api, hsh: Artist(api, **hsh).similar)

    def _default_params(self, extra_params = None):
        """Build the base request parameter dict, merged with extra_params."""
        if not self.name:
            raise InvalidParametersError("artist has to be provided.")
        params = {'artist': self.name}
        if extra_params is not None:
            params.update(extra_params)
        return params

    @staticmethod
    def _fetch_data(api,
                    artist = None,
                    mbid = None):
        """Fetch the artist.getInfo XML node for the given artist or MBID."""
        params = {'method': 'artist.getInfo'}
        if not (artist or mbid):
            raise InvalidParametersError("either artist or mbid has to be given as argument.")
        if artist:
            params.update({'artist': artist})
        elif mbid:
            params.update({'mbid': mbid})
        return api._fetch_data(params).find('artist')

    def _fill_info(self):
        """Populate the fillable properties from an artist.getInfo call."""
        data = Artist._fetch_data(self._api, self.name)
        self._name = data.findtext('name')
        self._mbid = data.findtext('mbid')
        self._url = data.findtext('url')
        self._image = dict([(i.get('size'), i.text) for i in data.findall('image')])
        # BUG FIX: findtext() returns a string, so compare against '1'
        # (the old comparison to the int 1 was always False); this matches
        # how 'streamable' is parsed in top_tracks and _search_yield_func.
        self._streamable = (data.findtext('streamable') == '1')
        if not self._stats:
            self._stats = Stats(
                subject = self,
                listeners = int(data.findtext('stats/listeners')),
                playcount = int(data.findtext('stats/playcount'))
            )
        self._top_tags = [
            Tag(
                self._api,
                subject = self,
                name = t.findtext('name'),
                url = t.findtext('url')
            )
            for t in data.findall('tags/tag')
        ]
        self._bio = Wiki(
            self,
            published = data.findtext('bio/published').strip() and
                datetime(*(time.strptime(
                    data.findtext('bio/published').strip(),
                    '%a, %d %b %Y %H:%M:%S +0000'
                )[0:6])),
            summary = data.findtext('bio/summary'),
            content = data.findtext('bio/content')
        )

    @staticmethod
    def _search_yield_func(api, artist):
        # adapter used by the searchable mixin: XML node -> Artist
        return Artist(
            api,
            name = artist.findtext('name'),
            mbid = artist.findtext('mbid'),
            url = artist.findtext('url'),
            image = dict([(i.get('size'), i.text) for i in artist.findall('image')]),
            streamable = (artist.findtext('streamable') == '1'),
        )

    @staticmethod
    def _hash_func(*args, **kwds):
        # artists are identified case-insensitively by name
        try:
            return hash(kwds['name'].lower())
        except KeyError:
            try:
                return hash(args[1].lower())
            except IndexError:
                raise InvalidParametersError("name has to be provided for hashing")

    def __hash__(self):
        return self.__class__._hash_func(name = self.name)

    def __eq__(self, other):
        # prefer the strongest identity available: MBID, then URL, then name
        if self.mbid and other.mbid:
            return self.mbid == other.mbid
        if self.url and other.url:
            return self.url == other.url
        return self.name == other.name

    def __lt__(self, other):
        return self.name < other.name

    def __repr__(self):
        return "<lastfm.Artist: %s>" % self._name
from datetime import datetime
import time
from lastfm.album import Album
from lastfm.api import Api
from lastfm.error import InvalidParametersError
from lastfm.event import Event
from lastfm.stats import Stats
from lastfm.tag import Tag
from lastfm.track import Track
from lastfm.user import User
from lastfm.wiki import Wiki
|
__author__ = 'shahbaz'
import sys
from functools import wraps
from logging import StreamHandler
from bitstring import BitArray
def singleton(f):
    """Decorator that replaces a zero-argument callable (typically a class)
    with the single instance obtained by calling it once.

    :param f: zero-argument callable to instantiate.
    :return: the instance produced by ``f()``.
    """
    return f()
def cached(f):
    """Memoize *f* on its positional arguments.

    The memo table is exposed as ``wrapper.cache`` so callers may inspect
    or clear it.

    :param f: the function to memoize (positional arguments must be hashable).
    :return: a memoizing wrapper around ``f``.
    """
    @wraps(f)
    def wrapper(*args):
        """Look up *args* in the memo table, computing on a miss."""
        memo = wrapper.cache
        if args not in memo:
            memo[args] = f(*args)
        return memo[args]
    wrapper.cache = {}
    return wrapper
class frozendict(object):
    """An immutable, hashable dictionary.

    All "mutating" operations (:meth:`update`, :meth:`remove`, :meth:`pop`)
    return a new ``frozendict`` and leave the receiver untouched, so
    instances can safely be used as dict keys or set members.
    """
    # _cached_hash is assigned lazily on first __hash__ call
    __slots__ = ["_dict", "_cached_hash"]

    def __init__(self, new_dict=None, **kwargs):
        """
        :param new_dict: optional mapping whose items seed this frozendict.
        :param kwargs: extra key/value pairs; they override ``new_dict``.
        """
        self._dict = dict()
        if new_dict is not None:
            self._dict.update(new_dict)
        self._dict.update(kwargs)

    def update(self, new_dict=None, **kwargs):
        """Return a new frozendict with the given items added/overridden.

        :param new_dict: optional mapping of items to merge in.
        :param kwargs: extra key/value pairs; they override ``new_dict``.
        :return: a new ``frozendict`` instance.
        """
        d = self._dict.copy()
        if new_dict is not None:
            d.update(new_dict)
        d.update(kwargs)
        return self.__class__(d)

    def remove(self, ks):
        """Return a new frozendict without the keys in *ks*.

        :param ks: an iterable of keys to drop (missing keys are ignored).
        :return: a new ``frozendict`` instance.
        """
        d = self._dict.copy()
        for k in ks:
            if k in d:
                del d[k]
        return self.__class__(d)

    def pop(self, *ks):
        """Return ``[value_of_k1, ..., value_of_kn, remainder]`` where
        *remainder* is a new frozendict without the popped keys.

        :param ks: keys to pop; a missing key raises ``KeyError``.
        :return: list of the popped values followed by the remainder.
        """
        result = []
        for k in ks:
            result.append(self[k])
        # BUG FIX: was ``self.remove(*ks)``, which unpacked the keys into
        # separate positional arguments (TypeError for more than one key,
        # and iterated the characters of a single multi-character key).
        # remove() takes one iterable of keys.
        result.append(self.remove(ks))
        return result

    def __repr__(self):
        """Delegate to the underlying dict's repr."""
        return repr(self._dict)

    def __iter__(self):
        """Iterate over the keys."""
        return iter(self._dict)

    def __contains__(self, key):
        """Membership test on the keys."""
        return key in self._dict

    def keys(self):
        """Key view of the underlying dict."""
        return self._dict.keys()

    def values(self):
        """Value view of the underlying dict."""
        return self._dict.values()

    def items(self):
        """Item view of the underlying dict."""
        return self._dict.items()

    def iterkeys(self):
        """Python 2 key iterator; delegates to the underlying dict."""
        return self._dict.iterkeys()

    def itervalues(self):
        """Python 2 value iterator; delegates to the underlying dict."""
        return self._dict.itervalues()

    def iteritems(self):
        """Python 2 item iterator; delegates to the underlying dict."""
        return self._dict.iteritems()

    def get(self, key, default=None):
        """Return the value for *key*, or *default* when absent."""
        return self._dict.get(key, default)

    def __getitem__(self, item):
        """Subscript access; raises ``KeyError`` for missing keys."""
        return self._dict[item]

    def __hash__(self):
        """Hash of the frozen item set, computed once and cached."""
        try:
            return self._cached_hash
        except AttributeError:
            # first call: the slot is still unassigned
            h = self._cached_hash = hash(frozenset(self._dict.items()))
            return h

    def __eq__(self, other):
        """Equality by contents; assumes *other* is a frozendict."""
        return self._dict == other._dict

    def __ne__(self, other):
        """Inequality by contents; assumes *other* is a frozendict."""
        return self._dict != other._dict

    def __len__(self):
        """Number of stored items."""
        return len(self._dict)
def indent_str(s, indent=4):
    """Prefix every line of *s* with *indent* spaces.

    :param s: the (possibly multi-line) string to indent.
    :param indent: number of spaces to prepend to each line.
    :return: the indented string, joined with newlines.
    """
    pad = indent * " "
    lines = s.splitlines()
    return "\n".join(pad + line for line in lines)
def repr_plus(ss, indent=4, sep="\n", prefix=""):
    """Render the repr of one object or a sequence of objects, indented.

    :param ss: a single string or an iterable of objects.
    :param indent: spaces of indentation applied to the whole result.
    :param sep: separator placed between the individual reprs.
    :param prefix: string prepended to each repr.
    :return: the combined, indented repr string.
    """
    # COMPAT FIX: ``basestring`` only exists on Python 2; fall back to
    # ``str`` on Python 3 so this module works on both.
    try:
        string_types = basestring  # noqa: F821 (Python 2)
    except NameError:
        string_types = str
    if isinstance(ss, string_types):
        ss = [ss]
    return indent_str(sep.join(prefix + repr(s) for s in ss), indent)
class LockStreamHandler(StreamHandler):
    '''Relies on a multiprocessing.Lock to serialize multiprocess writes to a
    stream.'''

    def __init__(self, lock, stream=sys.stderr):
        """
        :param lock: a Lock-like object (acquire()/release()) shared by every
            process writing to *stream*.
        :param stream: stream passed to logging.StreamHandler (default stderr).
        """
        self.lock = lock
        # BUG FIX: previously called super(MultiprocessStreamHandler, ...),
        # a name that does not exist, so instantiation raised NameError.
        super(LockStreamHandler, self).__init__(stream)

    def emit(self, record):
        """
        Acquire the lock before emitting the record.
        :param record: the LogRecord to write.
        """
        self.lock.acquire()
        try:
            super(LockStreamHandler, self).emit(record)
        finally:
            # release even if emit() raises, so the lock is never leaked
            self.lock.release()
class QueueStreamHandler(StreamHandler):
    """
    StreamHandler variant that hands records to a multiprocessing queue so a
    single consumer process can serialize the actual writes.
    """

    def __init__(self, queue, stream=sys.stderr):
        """
        :param queue: queue shared with the process performing the writes.
        :param stream: stream passed to logging.StreamHandler (default stderr).
        """
        self.queue = queue
        super(QueueStreamHandler, self).__init__(stream)

    def emit(self, record):
        """
        Enqueue the record instead of writing it directly.
        :param record: the LogRecord to enqueue.
        """
        self.queue.put(record)
def get_bitarray(packet, fields):
    """Concatenate the bit values of *fields* from *packet* into one BitArray.

    :param packet: mapping of field name -> {'length': bits, 'value': bits}.
    :param fields: ordered iterable of the field names to pack.
    :return: a BitArray holding the field values back to back.
    """
    result = BitArray()
    offset = 0
    for name in fields:
        width = packet[name]['length']
        result[offset:(offset + width)] = packet[name]['value']
        offset += width
    return result
|
import sys
import os
from PyQt5 import QtCore, QtWidgets, QtGui, QtMultimedia
from PyQt5 import QtNetwork
from irish_dictionary import irish_dictionary, gaeilge_gaeilge
from audio_grabber import entry_search, related_matches
class Text(QtWidgets.QWidget):
    """Widget owning the read-only text area used for dictionary output."""
    def __init__(self, parent=None):
        super().__init__(parent)
        output = QtWidgets.QTextEdit(parent)
        output.setReadOnly(True)
        self.text_entry = output
class IrishLabel(QtWidgets.QWidget):
    """Irish-version label, entry box and English-version switcher button."""
    def __init__(self, parent=None):
        super().__init__(parent)
        self.irish_label = QtWidgets.QLabel("Cuir d'fhocal anseo:")
        self.irish_entry = QtWidgets.QLineEdit()
        switch = QtWidgets.QPushButton("English Version")
        switch.clicked.connect(lambda: self.irish_to_english())
        self.english_language_button = switch

    @staticmethod
    def irish_to_english():
        """Swap the visible window from the Irish version to the English one,
        moving the shared text area across (the module-level ``irish_version``
        and ``english_version`` windows are created elsewhere)."""
        irish_version.hide()
        english_version.show()
        irish_version.layout().removeWidget(irish_version.text_entry)
        english_version.layout().addWidget(english_version.text_entry, 3, 0, 24, 8)
        english_version.resize(200, 400)
        english_version.center()
class IrishButtons(IrishLabel):
    """ this class creates the Irish language buttons"""
    def __init__(self, parent=None):
        super().__init__(parent)
        # Set buttons and enabled status
        self.bearla_button = QtWidgets.QPushButton("Béarla")
        self.gaeilge_button = QtWidgets.QPushButton("Gaeilge")
        self.connacht_button = QtWidgets.QPushButton("Cúige Chonnacht")
        self.ulster_button = QtWidgets.QPushButton("Cúige Uladh")
        self.munster_button = QtWidgets.QPushButton("Cúige Mhumhan")
        # dialect audio buttons stay disabled until a search finds audio
        self.connacht_button.setEnabled(False)
        self.ulster_button.setEnabled(False)
        self.munster_button.setEnabled(False)
        # Set callbacks
        self.bearla_button.clicked.connect(lambda: self.audio_check('English'))
        self.gaeilge_button.clicked.connect(lambda: self.audio_check('Irish'))
        self.munster_button.clicked.connect(lambda: self.play_audio('Munster'))
        self.connacht_button.clicked.connect(lambda: self.play_audio('Connacht'))
        self.ulster_button.clicked.connect(lambda: self.play_audio('Ulster'))
    def audio_check(self, language):
        """Run the dictionary search and enable/disable the dialect audio
        buttons depending on whether a recording was found."""
        audio = self.callback(language)
        if audio:
            self.ulster_button.setEnabled(True)
            self.connacht_button.setEnabled(True)
            self.munster_button.setEnabled(True)
        if not audio:
            self.ulster_button.setEnabled(False)
            self.connacht_button.setEnabled(False)
            self.munster_button.setEnabled(False)
    def callback(self, language):
        """ Irish version search """
        # the Irish UI queries the 'gaeilge' edition of the dictionary
        entry = str(self.irish_entry.text()).lower()
        entries, suggestions, wordlist, grammatical = irish_dictionary(entry, language, 'gaeilge')
        entries2 = None
        if language == 'Irish':
            # also query the monolingual Irish-Irish dictionary
            entries2 = gaeilge_gaeilge(entry)
        audio_exists = entry_search(entry)
        if audio_exists:
            related = related_matches(entry)
        else:
            related = 'Níl aon rud ann'
        if grammatical is not None:
            self.text_entry.moveCursor(QtGui.QTextCursor.End)
            self.text_entry.insertPlainText(grammatical + '\n\n')
        for i in entries:
            self.text_entry.moveCursor(QtGui.QTextCursor.End)
            self.text_entry.insertPlainText(i + '\n\n')
        self.text_entry.moveCursor(QtGui.QTextCursor.End)
        if entries2:
            self.text_entry.insertPlainText("As Gaeilge:\n\n")
            for i in entries2:
                self.text_entry.moveCursor(QtGui.QTextCursor.End)
                self.text_entry.insertPlainText(i + '\n\n')
        self.text_entry.insertPlainText(suggestions + "\n\nNa focail is déanaí: " + str(wordlist) +
                                        "\n\n" + '(Fuaim) Torthaí gaolmhara:' + str(related) + '\n\n')
        self.text_entry.moveCursor(QtGui.QTextCursor.End)
        # return value drives audio_check's enabling of the dialect buttons
        return audio_exists
    @staticmethod
    def play_audio(dialect):
        """Play the downloaded pronunciation file for *dialect*."""
        # audio files are cached under %APPDATA% on Windows, else the cwd
        appdata = os.getenv('APPDATA')
        file_names = {'Munster': 'CanM.mp3', 'Connacht': 'CanC.mp3', 'Ulster': 'CanU.mp3'}
        if appdata:
            url = QtCore.QUrl.fromLocalFile(os.path.abspath(os.path.join(appdata, file_names[dialect])))
        else:
            url = QtCore.QUrl.fromLocalFile(os.path.abspath(os.path.join("./", file_names[dialect])))
        content = QtMultimedia.QMediaContent(url)
        # NOTE(review): QMediaPlayer is a local; it appears to be kept alive
        # only by the stateChanged lambda below — confirm playback completes.
        player = QtMultimedia.QMediaPlayer()
        player.setMedia(content)
        player.play()
        player.stateChanged.connect(lambda: player.disconnect())
class IrishVersion(IrishButtons, Text):
    """ This class brings together all the Irish version widgets and
    lays them out in the correct order. Also controls window title and maximize button
    """
    def __init__(self, parent=None):
        super().__init__(parent)
        grid = QtWidgets.QGridLayout()
        grid.setSpacing(5)
        grid.addWidget(self.irish_label, 0, 0)
        grid.addWidget(self.irish_entry, 0, 1, 1, 4)
        grid.addWidget(self.english_language_button, 0, 6)
        grid.addWidget(self.bearla_button, 1, 2)
        grid.addWidget(self.gaeilge_button, 1, 4)
        grid.addWidget(self.ulster_button, 2, 2)
        grid.addWidget(self.connacht_button, 2, 3)
        grid.addWidget(self.munster_button, 2, 4)
        self.setLayout(grid)
        # BUG FIX: setWindowFlags() replaces the whole flag set, so two
        # consecutive calls discarded the minimize hint; OR the flags instead.
        self.setWindowFlags(QtCore.Qt.WindowMinimizeButtonHint |
                            QtCore.Qt.WindowCloseButtonHint)
        self.setWindowTitle("Foclóir")
        self.resize(200, 400)

    def center(self):
        """Center the window on the available desktop geometry."""
        qr = self.frameGeometry()
        cp = QtWidgets.QDesktopWidget().availableGeometry().center()
        qr.moveCenter(cp)
        self.move(qr.topLeft())
class EnglishLabel(QtWidgets.QWidget):
    """English-version label, entry box and Irish-version switcher button."""
    def __init__(self, parent=None):
        super().__init__(parent)
        self.english_label = QtWidgets.QLabel("Enter your word here:")
        self.english_entry = QtWidgets.QLineEdit()
        switch = QtWidgets.QPushButton("Leagan Gaeilge")
        switch.clicked.connect(lambda: self.english_to_irish())
        self.irish_language_button = switch

    @staticmethod
    def english_to_irish():
        """Swap the visible window from the English version to a freshly
        created Irish one, moving the shared text area across."""
        english_version.hide()
        global irish_version
        irish_version = IrishVersion()
        irish_version.show()
        english_version.layout().removeWidget(english_version.text_entry)
        irish_version.layout().addWidget(irish_version.text_entry, 3, 0, 24, 8)
        irish_version.resize(200, 400)
        irish_version.center()
class EnglishButtons(EnglishLabel):
    """ This class creates the English version buttons"""
    def __init__(self, parent=None):
        super().__init__(parent)
        # Define buttons
        self.english_button = QtWidgets.QPushButton("English")
        self.irish_button = QtWidgets.QPushButton("Irish")
        self.audio = False  # Initial audio setting
        self.ulster_button = QtWidgets.QPushButton("Ulster Dialect")
        self.connacht_button = QtWidgets.QPushButton("Connacht Dialect")
        self.munster_button = QtWidgets.QPushButton("Munster Dialect")
        # Define Callback procedures
        self.english_button.clicked.connect(lambda: self.audio_check("English"))
        self.irish_button.clicked.connect(lambda: self.audio_check('Irish'))
        self.munster_button.clicked.connect(lambda: self.play_audio('Munster'))
        self.connacht_button.clicked.connect(lambda: self.play_audio('Connacht'))
        self.ulster_button.clicked.connect(lambda: self.play_audio('Ulster'))
        # Initial disabling of audio buttons
        self.ulster_button.setEnabled(False)
        self.munster_button.setEnabled(False)
        self.connacht_button.setEnabled(False)
    def audio_check(self, language):
        """ Runs callback which prints all entries, suggestions, grammatical forms, etc. Callback also determines if
        an audio recording exists for the word in <language>. If it doesn't, it disables audio buttons. If audio exists,
        it enables buttons.
        """
        self.audio = self.callback(language)
        if self.audio:
            self.ulster_button.setEnabled(True)
            self.connacht_button.setEnabled(True)
            self.munster_button.setEnabled(True)
        if not self.audio:
            self.ulster_button.setEnabled(False)
            self.connacht_button.setEnabled(False)
            self.munster_button.setEnabled(False)
    def callback(self, language):
        """ Callback function that prints entries, suggestions, etc. and returns a boolean for whether the word(s)
        contain(s) audio."""
        # the English UI queries the 'english' edition of the dictionary
        entry = str(self.english_entry.text()).lower()
        entries, suggestions, wordlist, grammatical = irish_dictionary(entry, language, 'english')
        entries2 = None
        if language == 'Irish':
            # also query the monolingual Irish-Irish dictionary
            entries2 = gaeilge_gaeilge(entry)
        audio_exists = entry_search(entry)
        if audio_exists:
            related = related_matches(entry)
        else:
            related = 'None'
        if grammatical is not None:
            self.text_entry.moveCursor(QtGui.QTextCursor.End)
            self.text_entry.insertPlainText(grammatical + '\n\n')
        for i in entries:
            self.text_entry.moveCursor(QtGui.QTextCursor.End)
            self.text_entry.insertPlainText(i + '\n\n')
        if entries2:
            self.text_entry.insertPlainText("In Irish:\n\n")
            for i in entries2:
                self.text_entry.moveCursor(QtGui.QTextCursor.End)
                self.text_entry.insertPlainText(i + '\n\n')
            self.text_entry.moveCursor(QtGui.QTextCursor.End)
        self.text_entry.insertPlainText(suggestions + "\n\nRecently used words: " + str(wordlist) +
                                        "\n\n" + 'Related Audio Matches: ' + str(related) + '\n\n')
        self.text_entry.moveCursor(QtGui.QTextCursor.End)
        # return value drives audio_check's enabling of the dialect buttons
        return audio_exists
    @staticmethod
    def play_audio(dialect):
        """Play the downloaded pronunciation file for *dialect*."""
        # audio files are cached under %APPDATA% on Windows, else the cwd
        appdata = os.getenv('APPDATA')
        file_names = {'Munster': 'CanM.mp3', 'Connacht': 'CanC.mp3', 'Ulster': 'CanU.mp3'}
        if appdata:
            url = QtCore.QUrl.fromLocalFile(os.path.abspath(os.path.join(appdata, file_names[dialect])))
        else:
            url = QtCore.QUrl.fromLocalFile(os.path.abspath(os.path.join("./", file_names[dialect])))
        content = QtMultimedia.QMediaContent(url)
        # NOTE(review): QMediaPlayer is a local; it appears to be kept alive
        # only by the stateChanged lambda below — confirm playback completes.
        player = QtMultimedia.QMediaPlayer()
        player.setMedia(content)
        player.play()
        player.stateChanged.connect(lambda: player.disconnect())
class EnglishVersion(EnglishButtons, Text):
    """ This class brings together all the English version widgets and lays them out in the correct
    order. Also controls the English version window title and disables the maximize button
    """
    def __init__(self, parent=None):
        super().__init__(parent)
        grid = QtWidgets.QGridLayout()
        grid.setSpacing(5)
        grid.addWidget(self.english_label, 0, 0)
        grid.addWidget(self.english_entry, 0, 1, 1, 4)
        grid.addWidget(self.irish_language_button, 0, 6)
        grid.addWidget(self.english_button, 1, 2)
        grid.addWidget(self.irish_button, 1, 4)
        grid.addWidget(self.ulster_button, 2, 2)
        grid.addWidget(self.connacht_button, 2, 3)
        grid.addWidget(self.munster_button, 2, 4)
        grid.addWidget(self.text_entry, 3, 0, 24, 8)
        self.setLayout(grid)
        # BUG FIX: setWindowFlags() replaces the whole flag set, so two
        # consecutive calls discarded the minimize hint; OR the flags instead.
        self.setWindowFlags(QtCore.Qt.WindowMinimizeButtonHint |
                            QtCore.Qt.WindowCloseButtonHint)
        self.setWindowTitle("teanglann.ie Searcher")
        self.resize(200, 400)

    def center(self):
        """Center the window on the available desktop geometry."""
        qr = self.frameGeometry()
        cp = QtWidgets.QDesktopWidget().availableGeometry().center()
        qr.moveCenter(cp)
        self.move(qr.topLeft())
def main():
    """Create the Qt application and start with the English version window."""
    application = QtWidgets.QApplication(sys.argv)
    global english_version
    english_version = EnglishVersion()
    english_version.show()
    english_version.center()
    sys.exit(application.exec_())
if __name__ == '__main__':
    main()
|
from HowOldWebsite.estimators.estimator_sex import EstimatorSex
from HowOldWebsite.models import RecordSex
__author__ = 'Hao Yu'
def sex_estimate(database_face_array, feature_jar):
    """Predict a sex value for every face and build the matching DB records.

    :param database_face_array: sequence of face rows the records refer to.
    :param feature_jar: extracted features for those faces.
    :return: ``(success, records)`` where *records* is a list of RecordSex
        instances, or ``None`` when any step failed (best-effort: errors are
        swallowed and reported only through the success flag).
    """
    database_record = None
    try:
        face_count = len(database_face_array)
        predictions = __do_estimate(feature_jar, face_count)
        database_record = \
            __do_save_to_database(database_face_array, predictions)
    except Exception:
        # intentionally silent; failure is signalled via the flag
        return False, database_record
    return True, database_record
def __do_estimate(feature_jar, n_faces):
    """Run the sex estimator pipeline: combine, reduce, then estimate.

    :param feature_jar: raw feature collection for the detected faces.
    :param n_faces: number of faces (currently unused by the pipeline).
    :return: per-face estimation results from EstimatorSex.
    """
    combined = EstimatorSex.feature_combine(feature_jar)
    reduced = EstimatorSex.feature_reduce(combined)
    return EstimatorSex.estimate(reduced)
def __do_save_to_database(database_face, sex):
    """Pair each face row with its predicted sex as a RecordSex instance.

    NOTE(review): despite the name, the records are only constructed here,
    not persisted — confirm the caller saves them.

    :param database_face: sequence of face rows.
    :param sex: per-face predictions, indexed in step with *database_face*.
    :return: list of RecordSex instances.
    """
    records = []
    for index in range(len(database_face)):
        records.append(RecordSex(original_face=database_face[index],
                                 value_predict=sex[index]))
    return records
|
from FaustBot.Communication import Connection
from FaustBot.Model.RemoteUser import RemoteUser
from FaustBot.Modules.MagicNumberObserverPrototype import MagicNumberObserverPrototype
from FaustBot.Modules.ModuleType import ModuleType
from FaustBot.Modules.PingObserverPrototype import PingObserverPrototype
from FaustBot.Modules.UserList import UserList
class WhoObserver(MagicNumberObserverPrototype, PingObserverPrototype):
    """Keeps the shared UserList current by parsing WHO replies and
    periodically re-issuing WHO for the configured channel."""
    @staticmethod
    def cmd():
        # no chat command is associated with this observer
        return None
    @staticmethod
    def help():
        # no help text; the observer is not user-facing
        return None
    def __init__(self, user_list: UserList):
        super().__init__()
        self.user_list = user_list
        self.pings_seen = 1  # counts server PINGs; used as a coarse timer
        self.pending_whos = []  # RemoteUsers collected until ENDOFWHO arrives
    @staticmethod
    def get_module_types():
        # subscribe to numeric replies and to server pings
        return [ModuleType.ON_MAGIC_NUMBER, ModuleType.ON_PING]
    def update_on_magic_number(self, data, connection):
        """Dispatch the WHO-related IRC numeric replies."""
        if data['number'] == '352': # RPL_WHOREPLY
            self.input_who(data, connection)
        elif data['number'] == '315': # RPL_ENDOFWHO
            self.end_who()
    def input_who(self, data, connection: Connection):
        """Parse a single RPL_WHOREPLY line into a pending RemoteUser."""
        # target #channel user host server nick status :0 gecos
        target, channel, user, host, server, nick, *ign = data['arguments'].split(' ')
        self.pending_whos.append(RemoteUser(nick, user, host))
    def end_who(self):
        """On RPL_ENDOFWHO, swap the collected users into the shared list."""
        self.user_list.clear_list()
        for remuser in self.pending_whos:
            self.user_list.add_user(remuser)
        self.pending_whos = []
    def update_on_ping(self, data, connection: Connection):
        """Re-issue WHO for the channel roughly every 3 hours."""
        if self.pings_seen % 90 == 0: # 90 pings * 2 min each = 3 hours
            connection.raw_send('WHO ' + connection.details.get_channel())
        self.pings_seen += 1
|
# Public analysis submodules re-exported by this package.
__all__ = ['pleth_analysis', 'ekg_analysis']
|
import numpy as np
from nose.tools import assert_raises
from horton import *
def test_typecheck():
    """IOData must coerce/validate its array attributes on construction."""
    # coordinates are coerced to float
    m = IOData(coordinates=np.array([[1, 2, 3], [2, 3, 1]]))
    assert issubclass(m.coordinates.dtype.type, float)
    assert not hasattr(m, 'numbers')
    m = IOData(numbers=np.array([2, 3]), coordinates=np.array([[1, 2, 3], [2, 3, 1]]))
    # numbers are coerced to int, pseudo_numbers to float
    m = IOData(numbers=np.array([2.0, 3.0]), pseudo_numbers=np.array([1, 1]), coordinates=np.array([[1, 2, 3], [2, 3, 1]]))
    assert issubclass(m.numbers.dtype.type, int)
    assert issubclass(m.pseudo_numbers.dtype.type, float)
    assert hasattr(m, 'numbers')
    # attributes are deletable
    del m.numbers
    assert not hasattr(m, 'numbers')
    m = IOData(cube_data=np.array([[[1, 2], [2, 3], [3, 2]]]), coordinates=np.array([[1, 2, 3]]))
    # shape/consistency violations must raise TypeError
    with assert_raises(TypeError):
        IOData(coordinates=np.array([[1, 2], [2, 3]]))
    with assert_raises(TypeError):
        IOData(numbers=np.array([[1, 2], [2, 3]]))
    with assert_raises(TypeError):
        IOData(numbers=np.array([2, 3]), pseudo_numbers=np.array([1]))
    with assert_raises(TypeError):
        IOData(numbers=np.array([2, 3]), coordinates=np.array([[1, 2, 3]]))
    with assert_raises(TypeError):
        IOData(cube_data=np.array([[1, 2], [2, 3], [3, 2]]), coordinates=np.array([[1, 2, 3]]))
    with assert_raises(TypeError):
        IOData(cube_data=np.array([1, 2]))
def test_copy():
    """IOData.copy() must produce a distinct object whose attributes are
    shallow-shared with the original."""
    fn_fchk = context.get_fn('test/water_sto3g_hf_g03.fchk')
    fn_log = context.get_fn('test/water_sto3g_hf_g03.log')
    mol1 = IOData.from_file(fn_fchk, fn_log)
    mol2 = mol1.copy()
    assert mol1 != mol2
    vars1 = vars(mol1)
    vars2 = vars(mol2)
    assert len(vars1) == len(vars2)
    # COMPAT FIX: use items() instead of Python-2-only iteritems() so the
    # test runs on both Python 2 and 3.
    for key1, value1 in vars1.items():
        assert value1 is vars2[key1]
def test_dm_water_sto3g_hf():
    """Spot-check elements of the full density matrix loaded from fchk."""
    fn_fchk = context.get_fn('test/water_sto3g_hf_g03.fchk')
    mol = IOData.from_file(fn_fchk)
    dm = mol.get_dm_full()
    # reference values taken from the Gaussian output
    assert abs(dm.get_element(0, 0) - 2.10503807) < 1e-7
    assert abs(dm.get_element(0, 1) - -0.439115917) < 1e-7
    assert abs(dm.get_element(1, 1) - 1.93312061) < 1e-7
def test_dm_lih_sto3g_hf():
    """Spot-check the full and spin density matrices for LiH."""
    fn_fchk = context.get_fn('test/li_h_3-21G_hf_g09.fchk')
    mol = IOData.from_file(fn_fchk)
    dm = mol.get_dm_full()
    # reference values taken from the Gaussian output
    assert abs(dm.get_element(0, 0) - 1.96589709) < 1e-7
    assert abs(dm.get_element(0, 1) - 0.122114249) < 1e-7
    assert abs(dm.get_element(1, 1) - 0.0133112081) < 1e-7
    assert abs(dm.get_element(10, 10) - 4.23924688E-01) < 1e-7
    dm = mol.get_dm_spin()
    assert abs(dm.get_element(0, 0) - 1.40210760E-03) < 1e-9
    assert abs(dm.get_element(0, 1) - -2.65370873E-03) < 1e-9
    assert abs(dm.get_element(1, 1) - 5.38701212E-03) < 1e-9
    assert abs(dm.get_element(10, 10) - 4.23889148E-01) < 1e-7
def test_dm_ch3_rohf_g03():
    """Tr(S*D) must equal the electron count (9) and spin count (1) for CH3."""
    fn_fchk = context.get_fn('test/ch3_rohf_sto3g_g03.fchk')
    mol = IOData.from_file(fn_fchk)
    olp = mol.obasis.compute_overlap(mol.lf)
    dm = mol.get_dm_full()
    assert abs(olp.contract_two('ab,ab', dm) - 9) < 1e-6
    dm = mol.get_dm_spin()
    assert abs(olp.contract_two('ab,ab', dm) - 1) < 1e-6
|
from pyrocko import pz, io, trace
from pyrocko.example import get_example_data
# Fetch the example response file and waveform data.
get_example_data('STS2-Generic.polezero.txt')
get_example_data('test.mseed')
# Read the STS-2 instrument response (SAC pole-zero format).
zeros, poles, constant = pz.read_sac_zpk('STS2-Generic.polezero.txt')
# Extra zero at the origin — presumably converts the velocity response to a
# displacement response (matches the counts->displacement transfer below).
zeros.append(0.0j)
rest_sts2 = trace.PoleZeroResponse(
    zeros=zeros,
    poles=poles,
    constant=constant)
traces = io.load('test.mseed')
out_traces = list(traces)
for tr in traces:
    displacement = tr.transfer(
        1000.,                    # rise and fall of time window taper in [s]
        (0.001, 0.002, 5., 10.),  # frequency domain taper in [Hz]
        transfer_function=rest_sts2,
        invert=True)              # to change to (counts->displacement)
    # change channel id, so we can distinguish the traces in a trace viewer.
    displacement.set_codes(channel='D'+tr.channel[-1])
    out_traces.append(displacement)
# Write originals plus restituted traces to one file.
io.save(out_traces, 'displacement.mseed')
|
from setup import ExtensionInstaller
def loader():
    """Entry point called by the weewx extension installer."""
    return ProcessMonitorInstaller()
class ProcessMonitorInstaller(ExtensionInstaller):
    """Installer metadata for the pmon (process memory monitor) extension."""
    def __init__(self):
        super(ProcessMonitorInstaller, self).__init__(
            version="0.2",
            name='pmon',
            description='Collect and display process memory usage.',
            author="Matthew Wall",
            author_email="mwall@users.sourceforge.net",
            # service that collects the data at archive time
            process_services='user.pmon.ProcessMonitor',
            # configuration merged into weewx.conf on install
            config={
                'ProcessMonitor': {
                    'data_binding': 'pmon_binding',
                    'process': 'weewxd'},
                'DataBindings': {
                    'pmon_binding': {
                        'database': 'pmon_sqlite',
                        'table_name': 'archive',
                        'manager': 'weewx.manager.DaySummaryManager',
                        'schema': 'user.pmon.schema'}},
                'Databases': {
                    'pmon_sqlite': {
                        'database_name': 'pmon.sdb',
                        'driver': 'weedb.sqlite'}},
                'StdReport': {
                    'pmon': {
                        'skin': 'pmon',
                        'HTML_ROOT': 'pmon'}}},
            # files copied into the weewx installation
            files=[('bin/user', ['bin/user/pmon.py']),
                   ('skins/pmon', ['skins/pmon/skin.conf',
                                   'skins/pmon/index.html.tmpl'])]
            )
|
from distutils.core import setup
import py2exe
import os, sys
from glob import glob
import PyQt5
# Bundle the Python/Qt runtime DLLs, application data and Qt platform
# plugins next to the frozen executable (paths are hard-coded to a
# Python 3.4 / PyQt5 install on Windows).
data_files=[('',['C:/Python34/DLLs/sqlite3.dll','C:/Python34/Lib/site-packages/PyQt5/icuuc53.dll','C:/Python34/Lib/site-packages/PyQt5/icudt53.dll','C:/Python34/Lib/site-packages/PyQt5/icuin53.dll','C:/Python34/Lib/site-packages/PyQt5/Qt5Gui.dll','C:/Python34/Lib/site-packages/PyQt5/Qt5Core.dll','C:/Python34/Lib/site-packages/PyQt5/Qt5Widgets.dll']),
    ('data',['data/configure','data/model.sqlite','data/loading.jpg']),
    ('platforms',['C:/Python34/Lib/site-packages/PyQt5/plugins/platforms/qminimal.dll','C:/Python34/Lib/site-packages/PyQt5/plugins/platforms/qoffscreen.dll','C:/Python34/Lib/site-packages/PyQt5/plugins/platforms/qwindows.dll'])
    ]
# Also copy every Qt platform plugin found in the installed PyQt5 package.
qt_platform_plugins = [("platforms", glob(PyQt5.__path__[0] + r'\plugins\platforms\*.*'))]
data_files.extend(qt_platform_plugins)
# MSVC runtime DLLs required by CPython/Qt on machines without VS installed.
msvc_dlls = [('.', glob(r'''C:/Windows/System32/msvc?100.dll'''))]
data_files.extend(msvc_dlls)
setup(
    windows = ["ChemDB.py"],      # GUI app: no console window
    zipfile = None,               # bundle the library into the exe
    data_files = data_files,
    options = {
        'py2exe': {
            # modules py2exe cannot discover by static analysis
            'includes' : ['sip','PyQt5.QtCore','PyQt5.QtGui',"sqlite3",'xlrd','xlwt',"_sqlite3","PyQt5"],
        }
    },
)
|
from django.core.cache import cache
from django.conf import settings
from django.template.loader import render_to_string
from tendenci.apps.navs.cache import NAV_PRE_KEY
def cache_nav(nav, show_title=False):
    """
    Caches a nav's rendered html code
    """
    cache_key = '.'.join([settings.CACHE_PRE_KEY, NAV_PRE_KEY, str(nav.id)])
    html = render_to_string("navs/render_nav.html",
                            {'nav': nav, "show_title": show_title})
    cache.set(cache_key, html, 432000)  # 5 days
    return html
def get_nav(id):
    """
    Get the nav from the cache.
    """
    cache_key = '.'.join([settings.CACHE_PRE_KEY, NAV_PRE_KEY, str(id)])
    return cache.get(cache_key)
def clear_nav_cache(nav):
    """
    Clear nav cache
    """
    cache_key = '.'.join([settings.CACHE_PRE_KEY, NAV_PRE_KEY, str(nav.id)])
    cache.delete(cache_key)
|
from Estructura import espaceado
class Arbol_Sintactico_Abstracto:
    """Abstract syntax tree root: a sequence of top-level statements."""
    def __init__(self,alcance,hijos):
        # alcance: the symbol-table scope; hijos: the child statement nodes
        self.hijos = hijos
        self.alcance = alcance
        self.cont = 1
    def imprimir(self,tabulacion):
        """Pretty-print the tree; *tabulacion* is the current indent string."""
        # only label the node as a sequence when it has more than one child
        if (len(self.hijos) > 1):
            print tabulacion + "SECUENCIA"
        for hijo in self.hijos:
            hijo.nivel = 1
            hijo.imprimir(espaceado(tabulacion))
    def ejecutar(self):
        """Execute each child statement in order."""
        for hijo in self.hijos:
            hijo.nivel = 1
            hijo.ejecutar()
|
from __future__ import with_statement
import datetime
import os
import sys
import re
import urllib2
import copy
import itertools
import operator
import collections
import sickbeard
from sickbeard import helpers, classes, logger, db
from sickbeard.common import Quality, MULTI_EP_RESULT, SEASON_RESULT#, SEED_POLICY_TIME, SEED_POLICY_RATIO
from sickbeard import tvcache
from sickbeard import encodingKludge as ek
from sickbeard.exceptions import ex
from lib.hachoir_parser import createParser
from sickbeard.name_parser.parser import NameParser, InvalidNameException
from sickbeard import scene_numbering
from sickbeard.common import Quality, Overview
class GenericProvider:
    """Base class for search providers; subclasses set providerType and
    override isEnabled() (and friends) for their specific service."""
    # provider type constants stored in self.providerType
    NZB = "nzb"
    TORRENT = "torrent"
    def __init__(self, name):
        """
        :param name: human-readable provider name; also the basis for getID().
        """
        # these need to be set in the subclass
        self.providerType = None
        self.name = name
        self.url = ''
        self.supportsBacklog = False
        self.cache = tvcache.TVCache(self)
    def getID(self):
        """Return the sanitized, lower-case identifier for this provider."""
        return GenericProvider.makeID(self.name)
@staticmethod
def makeID(name):
return re.sub("[^\w\d_]", "_", name.strip().lower())
    def imageName(self):
        """Return the filename of this provider's icon (``<id>.png``)."""
        return self.getID() + '.png'
def _checkAuth(self):
return
def isActive(self):
if self.providerType == GenericProvider.NZB and sickbeard.USE_NZBS:
return self.isEnabled()
elif self.providerType == GenericProvider.TORRENT and sickbeard.USE_TORRENTS:
return self.isEnabled()
else:
return False
def isEnabled(self):
"""
This should be overridden and should return the config setting eg. sickbeard.MYPROVIDER
"""
return False
def getResult(self, episodes):
"""
Returns a result of the correct type for this provider
"""
if self.providerType == GenericProvider.NZB:
result = classes.NZBSearchResult(episodes)
elif self.providerType == GenericProvider.TORRENT:
result = classes.TorrentSearchResult(episodes)
else:
result = classes.SearchResult(episodes)
result.provider = self
return result
def getURL(self, url, post_data=None, headers=None):
"""
By default this is just a simple urlopen call but this method should be overridden
for providers with special URL requirements (like cookies)
"""
if not headers:
headers = []
data = helpers.getURL(url, post_data, headers)
if not data:
logger.log(u"Error loading " + self.name + " URL: " + url, logger.ERROR)
return None
return data
def downloadResult(self, result):
"""
Save the result to disk.
"""
logger.log(u"Downloading a result from " + self.name+" at " + result.url)
data = self.getURL(result.url)
if data == None:
return False
# use the appropriate watch folder
if self.providerType == GenericProvider.NZB:
saveDir = sickbeard.NZB_DIR
writeMode = 'w'
elif self.providerType == GenericProvider.TORRENT:
saveDir = sickbeard.TORRENT_DIR
writeMode = 'wb'
else:
return False
# use the result name as the filename
file_name = ek.ek(os.path.join, saveDir, helpers.sanitizeFileName(result.name) + '.' + self.providerType)
logger.log(u"Saving to " + file_name, logger.DEBUG)
try:
with open(file_name, writeMode) as fileOut:
fileOut.write(data)
helpers.chmodAsParent(file_name)
except EnvironmentError, e:
logger.log("Unable to save the file: " + ex(e), logger.ERROR)
return False
# as long as it's a valid download then consider it a successful snatch
return self._verify_download(file_name)
def _verify_download(self, file_name=None):
"""
Checks the saved file to see if it was actually valid, if not then consider the download a failure.
"""
# primitive verification of torrents, just make sure we didn't get a text file or something
if self.providerType == GenericProvider.TORRENT:
parser = createParser(file_name)
if parser:
mime_type = parser._getMimeType()
try:
parser.stream._input.close()
except:
pass
if mime_type != 'application/x-bittorrent':
logger.log(u"Result is not a valid torrent file", logger.WARNING)
return False
return True
def searchRSS(self):
self._checkAuth()
self.cache.updateCache()
return self.cache.findNeededEpisodes()
def getQuality(self, item):
"""
Figures out the quality of the given RSS item node
item: An elementtree.ElementTree element representing the <item> tag of the RSS feed
Returns a Quality value obtained from the node's data
"""
(title, url) = self._get_title_and_url(item) # @UnusedVariable
quality = Quality.sceneQuality(title)
return quality
def _doSearch(self):
return []
def _get_season_search_strings(self, show, season, wantedEp, searchSeason=False):
return []
def _get_episode_search_strings(self, ep_obj):
return []
def _get_title_and_url(self, item):
"""
Retrieves the title and URL data from the item XML node
item: An elementtree.ElementTree element representing the <item> tag of the RSS feed
Returns: A tuple containing two strings representing title and URL respectively
"""
title = helpers.get_xml_text(item.find('title'))
if title:
title = title.replace(' ', '.')
url = helpers.get_xml_text(item.find('link'))
if url:
url = url.replace('&', '&')
return (title, url)
def findEpisode(self, episode, manualSearch=False):
self._checkAuth()
# XEM episode scene numbering
sceneEpisode = copy.copy(episode)
sceneEpisode.convertToSceneNumbering()
logger.log(u'Searching "%s" for "%s" as "%s"'
% (self.name, episode.prettyName() , sceneEpisode.prettyName()))
self.cache.updateCache()
results = self.cache.searchCache(episode, manualSearch)
logger.log(u"Cache results: " + str(results), logger.DEBUG)
logger.log(u"manualSearch: " + str(manualSearch), logger.DEBUG)
# if we got some results then use them no matter what.
# OR
# return anyway unless we're doing a manual search
if results or not manualSearch:
return results
itemList = []
for cur_search_string in self._get_episode_search_strings(sceneEpisode):
itemList += self._doSearch(cur_search_string, show=episode.show)
for item in itemList:
(title, url) = self._get_title_and_url(item)
# parse the file name
try:
myParser = NameParser(False)
parse_result = myParser.parse(title, True)
except InvalidNameException:
logger.log(u"Unable to parse the filename " + title + " into a valid episode", logger.WARNING)
continue
if episode.show.air_by_date:
if parse_result.air_date != episode.airdate:
logger.log(u"Episode " + title + " didn't air on " + str(episode.airdate) + ", skipping it", logger.DEBUG)
continue
elif parse_result.season_number != episode.season or episode.episode not in parse_result.episode_numbers:
logger.log(u"Episode " + title + " isn't " + str(episode.season) + "x" + str(episode.episode) + ", skipping it", logger.DEBUG)
continue
quality = self.getQuality(item)
if not episode.show.wantEpisode(episode.season, episode.episode, quality, manualSearch):
logger.log(u"Ignoring result " + title + " because we don't want an episode that is " + Quality.qualityStrings[quality], logger.DEBUG)
continue
logger.log(u"Found result " + title + " at " + url, logger.DEBUG)
result = self.getResult([episode])
result.url = url
result.name = title
result.quality = quality
result.provider = self
result.content = None
results.append(result)
return results
def findSeasonResults(self, show, season):
itemList = []
results = {}
sceneSeasons = {}
searchSeason = False
# convert wanted seasons and episodes to XEM scene numbering
seasonEp = show.getAllEpisodes(season)
wantedEp = [x for x in seasonEp if show.getOverview(x.status) in (Overview.WANTED, Overview.QUAL)]
map(lambda x: x.convertToSceneNumbering(), wantedEp)
for x in wantedEp: sceneSeasons.setdefault(x.season,[]).append(x)
if wantedEp == seasonEp and not show.air_by_date:
searchSeason = True
for sceneSeason,sceneEpisodes in sceneSeasons.iteritems():
for curString in self._get_season_search_strings(show, str(sceneSeason), sceneEpisodes, searchSeason):
itemList += self._doSearch(curString)
for item in itemList:
(title, url) = self._get_title_and_url(item)
quality = self.getQuality(item)
# parse the file name
try:
myParser = NameParser(False)
parse_result = myParser.parse(title, True)
except InvalidNameException:
logger.log(u"Unable to parse the filename " + title + " into a valid episode", logger.WARNING)
continue
if not show.air_by_date:
# this check is meaningless for non-season searches
if (parse_result.season_number != None and parse_result.season_number != season) or (parse_result.season_number == None and season != 1):
logger.log(u"The result " + title + " doesn't seem to be a valid episode for season " + str(season) + ", ignoring", logger.DEBUG)
continue
# we just use the existing info for normal searches
actual_season = season
actual_episodes = parse_result.episode_numbers
else:
if not parse_result.air_by_date:
logger.log(u"This is supposed to be an air-by-date search but the result "+title+" didn't parse as one, skipping it", logger.DEBUG)
continue
myDB = db.DBConnection()
sql_results = myDB.select("SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?", [show.tvdbid, parse_result.air_date.toordinal()])
if len(sql_results) != 1:
logger.log(u"Tried to look up the date for the episode "+title+" but the database didn't give proper results, skipping it", logger.WARNING)
continue
actual_season = int(sql_results[0]["season"])
actual_episodes = [int(sql_results[0]["episode"])]
# make sure we want the episode
wantEp = True
for epNo in actual_episodes:
if not show.wantEpisode(actual_season, epNo, quality):
wantEp = False
break
if not wantEp:
logger.log(u"Ignoring result " + title + " because we don't want an episode that is " + Quality.qualityStrings[quality], logger.DEBUG)
continue
logger.log(u"Found result " + title + " at " + url, logger.DEBUG)
# make a result object
epObj = []
for curEp in actual_episodes:
epObj.append(show.getEpisode(actual_season, curEp))
result = self.getResult(epObj)
result.url = url
result.name = title
result.quality = quality
result.provider = self
result.content = None
if len(epObj) == 1:
epNum = epObj[0].episode
elif len(epObj) > 1:
epNum = MULTI_EP_RESULT
logger.log(u"Separating multi-episode result to check for later - result contains episodes: " + str(parse_result.episode_numbers), logger.DEBUG)
elif len(epObj) == 0:
epNum = SEASON_RESULT
result.extraInfo = [show]
logger.log(u"Separating full season result to check for later", logger.DEBUG)
if epNum in results:
results[epNum].append(result)
else:
results[epNum] = [result]
return results
def findPropers(self, search_date=None):
results = self.cache.listPropers(search_date)
return [classes.Proper(x['name'], x['url'], datetime.datetime.fromtimestamp(x['time'])) for x in results]
class NZBProvider(GenericProvider):
    """GenericProvider specialised for Usenet (NZB) results."""
    def __init__(self, name):
        GenericProvider.__init__(self, name)
        # must be set after the base __init__, which resets it to None
        self.providerType = GenericProvider.NZB
class TorrentProvider(GenericProvider):
    """GenericProvider specialised for torrent results."""
    def __init__(self, name):
        GenericProvider.__init__(self, name)
        # must be set after the base __init__, which resets it to None
        self.providerType = GenericProvider.TORRENT
|
"""
Functional tests of the RabbitMQ Workers
"""
import mock
import json
import unittest
import ADSDeploy.app as app
from ADSDeploy.pipeline.workers import IntegrationTestWorker, \
DatabaseWriterWorker
from ADSDeploy.webapp.views import MiniRabbit
from ADSDeploy.models import Base, Deployment
# AMQP URL of the test broker vhost; the short socket timeout and
# backpressure detection keep these functional tests from hanging.
RABBITMQ_URL = 'amqp://guest:guest@172.17.0.1:6672/adsdeploy_test?' \
               'socket_timeout=10&backpressure_detection=t'
class TestIntegrationTestWorker(unittest.TestCase):
    """
    Tests the functionality of the Integration Worker
    """
    def setUp(self):
        """Provision fresh queues and an in-memory database for each test."""
        # Create queue
        with MiniRabbit(RABBITMQ_URL) as w:
            w.make_queue('in', exchange='test')
            w.make_queue('out', exchange='test')
            w.make_queue('database', exchange='test')
        # Create database
        app.init_app({
            'SQLALCHEMY_URL': 'sqlite://',
            'SQLALCHEMY_ECHO': False,
        })
        Base.metadata.bind = app.session.get_bind()
        Base.metadata.create_all()
        self.app = app
    def tearDown(self):
        """Tear down the queues and database created in setUp."""
        # Destroy queue
        with MiniRabbit(RABBITMQ_URL) as w:
            w.delete_queue('in', exchange='test')
            w.delete_queue('out', exchange='test')
            w.delete_queue('database', exchange='test')
        # Destroy database
        Base.metadata.drop_all()
        self.app.close_app()
    @mock.patch('ADSDeploy.pipeline.integration_tester.IntegrationTestWorker.run_test')
    def test_workflow_of_integration_worker(self, mock_run_test):
        """
        General work flow of the integration worker from receiving a packet,
        to finishing with a packet.
        """
        # Worker receives a packet, most likely from the deploy worker
        # Example packet:
        #
        #  {
        #    'application': 'staging',
        #    'service': 'adsws',
        #    'release': '',
        #    'config': {},
        #  }
        #
        #
        example_packet = {
            'application': 'staging',
            'service': 'adsws',
            'version': 'v1.0.0',
            'config': {},
            'action': 'test'
        }
        expected_packet = example_packet.copy()
        expected_packet['tested'] = True
        # Override the run test returned value. This means the logic of the test
        # does not have to be mocked
        mock_run_test.return_value = expected_packet
        with MiniRabbit(RABBITMQ_URL) as w:
            w.publish(route='in', exchange='test', payload=json.dumps(example_packet))
        # Worker runs the tests
        params = {
            'RABBITMQ_URL': RABBITMQ_URL,
            'exchange': 'test',
            'subscribe': 'in',
            'publish': 'out',
            'status': 'database',
            'TEST_RUN': True
        }
        test_worker = IntegrationTestWorker(params=params)
        test_worker.run()
        test_worker.connection.close()
        # Worker sends a packet to the next worker
        with MiniRabbit(RABBITMQ_URL) as w:
            m_in = w.message_count(queue='in')
            m_out = w.message_count(queue='out')
            p = w.get_packet(queue='out')
        # the input queue is drained and exactly one result was forwarded
        self.assertEqual(m_in, 0)
        self.assertEqual(m_out, 1)
        # Remove values that are not in the starting packet
        self.assertTrue(p.pop('tested'))
        self.assertEqual(
            p,
            example_packet
        )
    @mock.patch('ADSDeploy.pipeline.integration_tester.IntegrationTestWorker.run_test')
    def test_db_writes_on_test_pass(self, mocked_run_test):
        """
        Check that the database is being written to when a test passes
        """
        # Stub data
        packet = {
            'application': 'adsws',
            'environment': 'staging',
            'version': 'v1.0.0',
        }
        expected_packet = packet.copy()
        expected_packet['tested'] = True
        mocked_run_test.return_value = expected_packet
        # Start the IntegrationTester worker
        params = {
            'RABBITMQ_URL': RABBITMQ_URL,
            'exchange': 'test',
            'subscribe': 'in',
            'publish': 'out',
            'status': 'database',
            'TEST_RUN': True
        }
        # Push to rabbitmq
        with MiniRabbit(RABBITMQ_URL) as w:
            w.publish(route='in', exchange='test', payload=json.dumps(packet))
        test_worker = IntegrationTestWorker(params=params)
        test_worker.run()
        test_worker.connection.close()
        # Assert there is a packet on the publish queue
        with MiniRabbit(RABBITMQ_URL) as w:
            self.assertEqual(w.message_count('out'), 1)
            self.assertEqual(w.message_count('database'), 1)
        # Start the DB Writer worker
        params = {
            'RABBITMQ_URL': RABBITMQ_URL,
            'exchange': 'test',
            'subscribe': 'database',
            'TEST_RUN': True
        }
        db_worker = DatabaseWriterWorker(params=params)
        db_worker.app = self.app
        db_worker.run()
        db_worker.connection.close()
        # exactly one Deployment row must exist, mirroring the input packet
        with self.app.session_scope() as session:
            all_deployments = session.query(Deployment).all()
            self.assertEqual(
                len(all_deployments),
                1,
                msg='More (or less) than 1 deployment entry: {}'
                    .format(all_deployments)
            )
            deployment = all_deployments[0]
            for key in packet:
                self.assertEqual(
                    packet[key],
                    getattr(deployment, key)
                )
            self.assertEqual(deployment.tested, True)
|
from __future__ import unicode_literals
import datetime
from django.db import models
from django.utils import timezone
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible  # only if you need to support Python 2
class Question(models.Model):
    """A poll question with the date it was published."""
    question_text = models.CharField(max_length=200)
    pub_date = models.DateTimeField('date published')
    def __str__(self):
        return self.question_text
    def was_published_recently(self):
        """Return True only for questions published within the last day.

        The upper bound on ``pub_date`` fixes the bug where a question
        with a publication date in the future reported itself as recent.
        """
        now = timezone.now()
        return now - datetime.timedelta(days=1) <= self.pub_date <= now
@python_2_unicode_compatible  # only if you need to support Python 2
class Choice(models.Model):
    """One selectable answer for a Question, with its vote tally."""
    question = models.ForeignKey(Question, on_delete=models.CASCADE)  # parent poll
    choice_text = models.CharField(max_length=200)
    votes = models.IntegerField(default=0)  # running vote count
    def __str__(self):
        return self.choice_text
|
class Solution:
    # @param root, a tree link node
    # @return nothing
    def connect(self, root):
        """Wire each node's ``next`` pointer to the node on its right.

        Works on arbitrary (not necessarily perfect) binary trees: for each
        level, collect the existing children left-to-right and link every
        consecutive pair; the last node of a level keeps ``next`` as None.
        """
        if not root:
            return
        level = [root]
        while level:
            children = [kid
                        for node in level
                        for kid in (node.left, node.right)
                        if kid]
            for left_kid, right_kid in zip(children, children[1:]):
                left_kid.next = right_kid
            level = children
|
# Appears to be a stochastic predator/prey (Lotka-Volterra style) simulation
# integrated with forward Euler and logged through modex.
# NOTE(review): parameter roles below inferred from the classic model -- confirm.
x0 = 1.0   # initial x population (prey)
y0 = 0.1   # initial y population (predator)
b = 1.0    # x growth rate
p = 1.0    # predation coefficient on x
r = 1.0    # y growth per unit of x
d = 1.0    # y death rate
T = 30     # total simulated time
dt = 0.01  # Euler time step
noise = 0.1  # std-dev of Gaussian noise added to both rates
import modex
log = modex.log()  # NOTE(review): assumed to record attribute writes per step -- confirm
import random
t=0
x=x0
y=y0
while t<T:
    # per-capita growth rates with additive noise
    f = b - p*y + random.gauss(0, noise)
    g = r*x - d + random.gauss(0, noise)
    # Euler update; populations are clamped at zero below
    x += x*f*dt
    y += y*g*dt
    if x<0: x = 0
    if y<0: y = 0
    t+=dt
    log.time = t
    log.x = x
    log.y = y
|
class Word:
    """A word's (possibly transformed) text paired with its original index."""
    def __init__(self, data, index):
        self.data = data
        self.index = index


def printAnagrams(arr):
    """Print the words of ``arr`` grouped so that anagrams are adjacent.

    Each word is keyed by its sorted letters (anagrams share a key) and the
    keyed entries are sorted; Python's stable sort keeps anagrams in their
    original relative order. Fixed to use print() so the code runs on
    Python 3 as well as Python 2, and the three index loops are collapsed
    into one comprehension plus an in-place sort.
    """
    keyed = [Word(''.join(sorted(word)), i) for i, word in enumerate(arr)]
    keyed.sort(key=lambda w: w.data)
    for w in keyed:
        print(arr[w.index])


def main():
    print("Hello, world")
    arr = ["dog", "act", "cat", "god", "tac"]
    printAnagrams(arr)


if __name__ == '__main__':
    main()
|
from optparse import make_option
from optparse import OptionParser
import logging
import contextlib
import datetime
from django.core.management.base import BaseCommand
from django.conf import settings
import dateutil
import netCDF4
from lizard_neerslagradar import netcdf
# Module-level logger for this management command.
# (Fixes the duplicated "logger = logger =" assignment typo.)
logger = logging.getLogger(__name__)
class Command(BaseCommand):
    """Management command: write one geotiff per 5-minute radar timestep."""
    args = ""
    help = "Create a geotiff per timestep from the radar.nc file."
    option_list = BaseCommand.option_list + (
        make_option(
            "--from", action="store", type="string",
            dest="from_", default="2011-01-07",
            help="Generate geotiffs starting from this datetime. "
            "Use a string in the format YYYY-MM-DD HH:MM "
            "(fuzzy substrings are allowed)"),
        make_option("--skip-existing", action="store_true",
                    dest="skip_existing", default=False,
                    help="Skip existing geotiffs"),
    )
    def handle(self, *args, **options):
        """Generate geotiffs for every timestep in a (hard-coded) day."""
        # NOTE(review): re-parsing sys.argv here replaces the ``options``
        # Django already passed in -- confirm this is intentional.
        parser = OptionParser(option_list=self.option_list)
        (options, args) = parser.parse_args()
        # Parsed options are currently ignored (and loudly logged as such);
        # the date range below is hard-coded.
        logger.warn("IGNORED from=%s", options.from_)
        logger.warn("IGNORED skip_existing=%s", options.skip_existing)
        time_from = dateutil.parser.parse('2011-01-07T00:00:00.000Z')
        time_to = dateutil.parser.parse('2011-01-08T00:00:00.000Z')
        times_list = [time_from]
        if time_to:
            # Build the list of 5-minute steps from time_from up to time_to.
            interval = datetime.timedelta(minutes=5)
            time = time_from
            while time < time_to:
                time += interval
                times_list.append(time)
        nc = netCDF4.Dataset(settings.RADAR_NC_PATH, 'r')
        with contextlib.closing(nc):
            for time in times_list:
                try:
                    path = netcdf.time_2_path(time)
                    netcdf.mk_geotiff(nc, time, path)
                    logger.info('Created geotiff for {}'.format(time))
                except:
                    # deliberate best-effort: log and continue with the
                    # next timestep rather than aborting the whole run
                    logger.exception(
                        'While creating geotiff for {}'.format(time))
|
class DrawingDimensioningWorkbench (Workbench):
    """FreeCAD workbench registering the Drawing Dimensioning toolbars."""
    # Icon generated using by converting linearDimension.svg to xpm format using Gimp
    Icon = '''
/* XPM */
static char * linearDimension_xpm[] = {
"32 32 10 1",
" 	c None",
".	c #000000",
"+	c #0008FF",
"@	c #0009FF",
"#	c #000AFF",
"$	c #00023D",
"%	c #0008F7",
"&	c #0008EE",
"*	c #000587",
"=	c #000001",
". .",
". .",
". .",
". .",
". .",
". .",
". .",
". .",
". .",
". .",
". .",
". .",
". +@@ + .",
". @+@@+ +@@+@ .",
". +@+@@@@@@ @@@@@@@# .",
"$%@@@@@@@@@+@@@@@@@@@@@@@@@@@@&$",
". #@@@@@@@@ #+@@@@@@@@*=",
". @+@@+ +@@@@@ .",
". +@ #@++ .",
". # .",
". .",
". .",
". .",
". .",
". .",
". .",
". .",
". .",
". .",
". .",
". .",
". ."};
'''
    MenuText = 'Drawing Dimensioning'
    def Initialize(self):
        """Import the command modules and build the workbench toolbars."""
        import importlib, os
        from dimensioning import __dir__, debugPrint, iconPath
        import linearDimension
        import linearDimension_stack
        import deleteDimension
        import circularDimension
        import grabPointAdd
        import textAdd
        import textEdit
        import textMove
        import escapeDimensioning
        import angularDimension
        import radiusDimension
        import centerLines
        import noteCircle
        import toleranceAdd
        commandslist = [
            'dd_linearDimension', #where dd is short-hand for drawing dimensioning
            'dd_linearDimensionStack',
            'dd_circularDimension',
            'dd_radiusDimension',
            'dd_angularDimension',
            'dd_centerLines',
            'dd_centerLine',
            'dd_noteCircle',
            'dd_grabPoint',
            'dd_addText',
            'dd_editText',
            'dd_moveText',
            'dd_addTolerance',
            'dd_deleteDimension',
            'dd_escapeDimensioning',
            ]
        self.appendToolbar('Drawing Dimensioning', commandslist)
        import unfold
        import unfold_bending_note
        import unfold_export_to_dxf
        unfold_cmds = [
            'dd_unfold',
            'dd_bendingNote',
            ]
        if hasattr(os,'uname') and os.uname()[0] == 'Linux' : #this command only works on Linux systems
            unfold_cmds.append('dd_exportToDxf')
        self.appendToolbar( 'Drawing Dimensioning Folding', unfold_cmds )
        import weldingSymbols
        # Bug fix: the original read ``int( FreeCAD.Version()[1] > 15 )``,
        # which converts the *comparison* (a string compared to an int) to
        # an int instead of comparing the converted version number.
        if int(FreeCAD.Version()[1]) > 15 and int(FreeCAD.Version()[2].split()[0]) > 5165:
            weldingCommandList = ['dd_weldingGroupCommand']
        else:
            weldingCommandList = weldingSymbols.weldingCmds
        self.appendToolbar('Drawing Dimensioning Welding Symbols', weldingCommandList)
        self.appendToolbar('Drawing Dimensioning Help', [ 'dd_help' ])
        FreeCADGui.addIconPath(iconPath)
        FreeCADGui.addPreferencePage( os.path.join( __dir__, 'Resources', 'ui', 'drawing_dimensioing_prefs-base.ui'),'Drawing Dimensioning' )
# Register the workbench with the FreeCAD GUI at module import time.
Gui.addWorkbench(DrawingDimensioningWorkbench())
|
import os, shutil, xbmc, xbmcgui
# XBMC (Python 2) maintenance script: walk every known game/app directory and
# delete the generated artwork (_Resources folder, default.tbn, fanart.jpg).
pDialog = xbmcgui.DialogProgress()
dialog = xbmcgui.Dialog()
Game_Directories = [ "E:\\Games\\", "F:\\Games\\", "G:\\Games\\", "E:\\Applications\\", "F:\\Applications\\", "G:\\Applications\\", "E:\\Homebrew\\", "F:\\Homebrew\\", "G:\\Homebrew\\", "E:\\Apps\\", "F:\\Apps\\", "G:\\Apps\\", "E:\\Ports\\", "F:\\Ports\\", "G:\\Ports\\" ]
# NOTE(review): the loop variable shadows the list it iterates -- it works,
# but renaming the loop variable would be clearer.
for Game_Directories in Game_Directories:
    if os.path.isdir( Game_Directories ):
        pDialog.create( "PARSING XBOX GAMES","Initializing" )
        pDialog.update(0,"Removing _Resources Folders","","This can take some time, please be patient.")
        for Items in sorted( os.listdir( Game_Directories ) ):
            if os.path.isdir(os.path.join( Game_Directories, Items)):
                Game_Directory = os.path.join( Game_Directories, Items )
                # per-game artwork paths to remove
                _Resources = os.path.join( Game_Directory, "_Resources" )
                DefaultTBN = os.path.join( Game_Directory, "default.tbn" )
                FanartJPG = os.path.join( Game_Directory, "fanart.jpg" )
                if os.path.isdir(_Resources):
                    shutil.rmtree(_Resources)
                else:
                    print "Cannot find: " + _Resources
                if os.path.isfile(DefaultTBN):
                    os.remove(DefaultTBN)
                else:
                    print "Cannot find: " + DefaultTBN
                if os.path.isfile(FanartJPG):
                    os.remove(FanartJPG)
                else:
                    print "Cannot find: " + FanartJPG
        pDialog.close()
dialog.ok("COMPLETE","Done, _Resources Folders Removed.")
|
"""
This module provides classes that describe banks of waveforms
"""
import types
import logging
import os.path
import h5py
from copy import copy
import numpy as np
from ligo.lw import table, lsctables, utils as ligolw_utils
import pycbc.waveform
import pycbc.pnutils
import pycbc.waveform.compress
from pycbc import DYN_RANGE_FAC
from pycbc.types import FrequencySeries, zeros
import pycbc.io
from pycbc.io.ligolw import LIGOLWContentHandler
import hashlib
def sigma_cached(self, psd):
    """ Cache sigma calculate for use in tandem with the FilterBank class
    """
    # Small per-template LRU cache of sigmasq values, keyed by id(psd).
    if not hasattr(self, '_sigmasq'):
        from pycbc.opt import LimitedSizeDict
        self._sigmasq = LimitedSizeDict(size_limit=2**5)
    key = id(psd)
    # Also mark on the psd which templates have cached against it, so a new
    # psd object (which may reuse a freed id) forces a recompute -- presumably
    # the reason for the double bookkeeping; confirm before changing.
    if not hasattr(psd, '_sigma_cached_key'):
        psd._sigma_cached_key = {}
    if key not in self._sigmasq or id(self) not in psd._sigma_cached_key:
        psd._sigma_cached_key[id(self)] = True
        # If possible, we precalculate the sigmasq vector for all possible waveforms
        if pycbc.waveform.waveform_norm_exists(self.approximant):
            if not hasattr(psd, 'sigmasq_vec'):
                psd.sigmasq_vec = {}
            if self.approximant not in psd.sigmasq_vec:
                psd.sigmasq_vec[self.approximant] = \
                    pycbc.waveform.get_waveform_filter_norm(
                        self.approximant,
                        psd,
                        len(psd),
                        psd.delta_f,
                        self.min_f_lower
                    )
            if not hasattr(self, 'sigma_scale'):
                # Get an amplitude normalization (mass dependant constant norm)
                amp_norm = pycbc.waveform.get_template_amplitude_norm(
                    self.params, approximant=self.approximant)
                amp_norm = 1 if amp_norm is None else amp_norm
                self.sigma_scale = (DYN_RANGE_FAC * amp_norm) ** 2.0
            curr_sigmasq = psd.sigmasq_vec[self.approximant]
            kmin = int(self.f_lower / psd.delta_f)
            # sigmasq is the difference of the cumulative norm between the
            # template's frequency cutoffs, scaled by the amplitude norm.
            self._sigmasq[key] = self.sigma_scale * \
                (curr_sigmasq[self.end_idx-1] - curr_sigmasq[kmin])
        else:
            # Fallback: explicit inner product of |h|^2 with 1/psd over the
            # template's frequency band (the view is cached on the template).
            if not hasattr(self, 'sigma_view'):
                from pycbc.filter.matchedfilter import get_cutoff_indices
                N = (len(self) -1) * 2
                kmin, kmax = get_cutoff_indices(
                    self.min_f_lower or self.f_lower, self.end_frequency,
                    self.delta_f, N)
                self.sslice = slice(kmin, kmax)
                self.sigma_view = self[self.sslice].squared_norm() * 4.0 * self.delta_f
            if not hasattr(psd, 'invsqrt'):
                psd.invsqrt = 1.0 / psd
            self._sigmasq[key] = self.sigma_view.inner(psd.invsqrt[self.sslice])
    return self._sigmasq[key]
def boolargs_from_apprxstr(approximant_strs):
    """Convert ``APPRX:COND`` strings into (approximant, condition) tuples.

    Parameters
    ----------
    approximant_strs : (list of) string(s)
        Each string is formatted ``APPRX:COND``; the last one may omit the
        conditional part (equivalent to ``:else``). A bare string is treated
        as a one-element list.

    Returns
    -------
    list of tuple
        Tuples of the colon-separated parts, suitable for passing directly
        to ``FieldArray.parse_boolargs``.
    """
    strs = approximant_strs if isinstance(approximant_strs, list) \
        else [approximant_strs]
    boolargs = []
    for apprxstr in strs:
        boolargs.append(tuple(apprxstr.split(':')))
    return boolargs
def add_approximant_arg(parser, default=None, help=None):
    """Attach the ``--approximant`` option to ``parser``.

    Parameters
    ----------
    parser : ArgumentParser
        The argument parser to add the argument to.
    default : {None, str}
        Default value for the option. Defaults to None.
    help : {None, str}
        Custom help message; when None, the standard descriptive message
        explaining conditional approximant syntax is used.
    """
    if help is None:
        help = ("The approximant(s) to use. Multiple approximants to use "
                "in different regions may be provided. If multiple "
                "approximants are provided, every one but the last must be "
                "be followed by a conditional statement defining where that "
                "approximant should be used. Conditionals can be any boolean "
                "test understood by numpy. For example, 'Apprx:(mtotal > 4) & "
                "(mchirp <= 5)' would use approximant 'Apprx' where total mass "
                "is > 4 and chirp mass is <= 5. "
                "Conditionals are applied in order, with each successive one "
                "only applied to regions not covered by previous arguments. "
                "For example, `'TaylorF2:mtotal < 4' 'IMRPhenomD:mchirp < 3'` "
                "would result in IMRPhenomD being used where chirp mass is < 3 "
                "and total mass is >= 4. The last approximant given may use "
                "'else' as the conditional or include no conditional. In either "
                "case, this will cause the last approximant to be used in any "
                "remaning regions after all the previous conditionals have been "
                "applied. For the full list of possible parameters to apply "
                "conditionals to, see WaveformArray.default_fields(). Math "
                "operations may also be used on parameters; syntax is python, "
                "with any operation recognized by numpy.")
    parser.add_argument("--approximant", nargs='+', type=str, default=default,
                        metavar='APPRX[:COND]',
                        help=help)
def parse_approximant_arg(approximant_arg, warray):
    """Decide which approximant each template in ``warray`` should use.

    Parameters
    ----------
    approximant_arg : list
        The parsed ``--approximant`` argument (see ``add_approximant_arg``).
    warray : FieldArray
        The array of templates against which the conditionals are evaluated.
        Must be a FieldArray or a subclass of it.

    Returns
    -------
    array
        A numpy array with one approximant name per element of ``warray``.
    """
    boolargs = boolargs_from_apprxstr(approximant_arg)
    parsed = warray.parse_boolargs(boolargs)
    return parsed[0]
def tuple_to_hash(tuple_to_be_hashed):
    """
    Return a hash for a numpy array, avoids native (unsafe) python3 hash function

    Parameters
    ----------
    tuple_to_be_hashed: tuple
        The tuple which is being hashed
        Must be convertible to a numpy array

    Returns
    -------
    int
        an integer representation of the hashed array
    """
    # blake2b over the C-ordered bytes gives a stable, platform-independent
    # digest; an 8-byte digest maps onto exactly one machine integer.
    h = hashlib.blake2b(np.array(tuple_to_be_hashed).tobytes('C'),
                        digest_size=8)
    # np.fromstring is deprecated (removed in modern numpy); frombuffer is
    # the documented byte-for-byte replacement for binary data.
    return np.frombuffer(h.digest(), dtype=int)[0]
class TemplateBank(object):
"""Class to provide some basic helper functions and information
about elements of a template bank.
Parameters
----------
filename : string
The name of the file to load. Must end in '.xml[.gz]' or '.hdf'. If an
hdf file, it should have a 'parameters' in its `attrs` which gives a
list of the names of fields to load from the file. If no 'parameters'
are found, all of the top-level groups in the file will assumed to be
parameters (a warning will be printed to stdout in this case). If an
xml file, it must have a `SnglInspiral` table.
approximant : {None, (list of) string(s)}
Specify the approximant(s) for each template in the bank. If None
provided, will try to load the approximant from the file. The
approximant may either be a single string (in which case the same
approximant will be used for all templates) or a list of strings and
conditionals specifying where to use the approximant. See
`boolargs_from_apprxstr` for syntax.
parameters : {None, (list of) sting(s)}
Specify what parameters to load from the file. If None, all of the
parameters in the file (if an xml file, this is all of the columns in
the SnglInspiral table, if an hdf file, this is given by the
parameters attribute in the file). The list may include parameters that
are derived from the file's parameters, or functions thereof. For a
full list of possible parameters, see `WaveformArray.default_fields`.
If a derived parameter is specified, only the parameters needed to
compute that parameter will be loaded from the file. For example, if
`parameters='mchirp'`, then only `mass1, mass2` will be loaded from
the file. Note that derived parameters can only be used if the
needed parameters are in the file; e.g., you cannot use `chi_eff` if
`spin1z`, `spin2z`, `mass1`, and `mass2` are in the input file.
\**kwds :
Any additional keyword arguments are stored to the `extra_args`
attribute.
Attributes
----------
table : WaveformArray
An instance of a WaveformArray containing all of the information about
the parameters of the bank.
has_compressed_waveforms : {False, bool}
True if compressed waveforms are present in the the (hdf) file; False
otherwise.
parameters : tuple
The parameters loaded from the input file. Same as `table.fieldnames`.
indoc : {None, xmldoc}
If an xml file was provided, an in-memory representation of the xml.
Otherwise, None.
filehandler : {None, h5py.File}
If an hdf file was provided, the file handler pointing to the hdf file
(left open after initialization). Otherwise, None.
extra_args : {None, dict}
Any extra keyword arguments that were provided on initialization.
"""
    def __init__(self, filename, approximant=None, parameters=None,
                 **kwds):
        """Load a template bank from an xml[.gz] or hdf file.

        See the class docstring for the meaning of the arguments.
        """
        self.has_compressed_waveforms = False
        ext = os.path.basename(filename)
        if ext.endswith(('.xml', '.xml.gz', '.xmlgz')):
            # LIGOLW XML input: read the SnglInspiral table into a
            # WaveformArray kept in memory (no open file handle retained).
            self.filehandler = None
            self.indoc = ligolw_utils.load_filename(
                filename, False, contenthandler=LIGOLWContentHandler)
            self.table = table.get_table(
                self.indoc, lsctables.SnglInspiralTable.tableName)
            self.table = pycbc.io.WaveformArray.from_ligolw_table(self.table,
                columns=parameters)
            # inclination stored in xml alpha3 column
            names = list(self.table.dtype.names)
            names = tuple([n if n != 'alpha3' else 'inclination' for n in names])
            # low frequency cutoff in xml alpha6 column
            names = tuple([n if n!= 'alpha6' else 'f_lower' for n in names])
            self.table.dtype.names = names
        elif ext.endswith(('hdf', '.h5')):
            # HDF input: the file handle is kept open in self.filehandler.
            self.indoc = None
            f = h5py.File(filename, 'r')
            self.filehandler = f
            try:
                fileparams = list(f.attrs['parameters'])
            except KeyError:
                # just assume all of the top-level groups are the parameters
                fileparams = list(f.keys())
                logging.info("WARNING: no parameters attribute found. "
                    "Assuming that %s " %(', '.join(fileparams)) +
                    "are the parameters.")
            tmp_params = []
            # At this point fileparams might be bytes. Fix if it is
            for param in fileparams:
                try:
                    param = param.decode()
                    tmp_params.append(param)
                except AttributeError:
                    tmp_params.append(param)
            fileparams = tmp_params
            # use WaveformArray's syntax parser to figure out what fields
            # need to be loaded
            if parameters is None:
                parameters = fileparams
            common_fields = list(pycbc.io.WaveformArray(1,
                names=parameters).fieldnames)
            add_fields = list(set(parameters) &
                (set(fileparams) - set(common_fields)))
            # load
            dtype = []
            data = {}
            for key in common_fields+add_fields:
                data[key] = f[key][:]
                dtype.append((key, data[key].dtype))
            num = f[fileparams[0]].size
            self.table = pycbc.io.WaveformArray(num, dtype=dtype)
            for key in data:
                self.table[key] = data[key]
            # add the compressed waveforms, if they exist
            self.has_compressed_waveforms = 'compressed_waveforms' in f
        else:
            raise ValueError("Unsupported template bank file extension %s" %(
                ext))
        # if approximant is specified, override whatever was in the file
        # (if anything was in the file)
        if approximant is not None:
            # get the approximant for each template
            dtype = h5py.string_dtype(encoding='utf-8')
            apprxs = np.array(self.parse_approximant(approximant),
                              dtype=dtype)
            if 'approximant' not in self.table.fieldnames:
                self.table = self.table.add_fields(apprxs, 'approximant')
            else:
                self.table['approximant'] = apprxs
        self.extra_args = kwds
        self.ensure_hash()
    @property
    def parameters(self):
        """tuple: Names of the fields stored in the template table."""
        return self.table.fieldnames
def ensure_hash(self):
"""Ensure that there is a correctly populated template_hash.
Check for a correctly populated template_hash and create if it doesn't
already exist.
"""
fields = self.table.fieldnames
if 'template_hash' in fields:
return
# The fields to use in making a template hash
hash_fields = ['mass1', 'mass2', 'inclination',
'spin1x', 'spin1y', 'spin1z',
'spin2x', 'spin2y', 'spin2z',]
fields = [f for f in hash_fields if f in fields]
template_hash = np.array([tuple_to_hash(v) for v in zip(*[self.table[p]
for p in fields])])
if not np.unique(template_hash).size == template_hash.size:
raise RuntimeError("Some template hashes clash. This should not "
"happen.")
self.table = self.table.add_fields(template_hash, 'template_hash')
def write_to_hdf(self, filename, start_index=None, stop_index=None,
force=False, skip_fields=None,
write_compressed_waveforms=True):
"""Writes self to the given hdf file.
Parameters
----------
filename : str
The name of the file to write to. Must end in '.hdf'.
start_index : If a specific slice of the template bank is to be
written to the hdf file, this would specify the index of the
first template in the slice
stop_index : If a specific slice of the template bank is to be
written to the hdf file, this would specify the index of the
last template in the slice
force : {False, bool}
If the file already exists, it will be overwritten if True.
Otherwise, an OSError is raised if the file exists.
skip_fields : {None, (list of) strings}
Do not write the given fields to the hdf file. Default is None,
in which case all fields in self.table.fieldnames are written.
write_compressed_waveforms : {True, bool}
Write compressed waveforms to the output (hdf) file if this is
True, which is the default setting. If False, do not write the
compressed waveforms group, but only the template parameters to
the output file.
Returns
-------
h5py.File
The file handler to the output hdf file (left open).
"""
if not filename.endswith('.hdf'):
raise ValueError("Unrecoginized file extension")
if os.path.exists(filename) and not force:
raise IOError("File %s already exists" %(filename))
f = h5py.File(filename, 'w')
parameters = self.parameters
if skip_fields is not None:
if not isinstance(skip_fields, list):
skip_fields = [skip_fields]
parameters = [p for p in parameters if p not in skip_fields]
# save the parameters
f.attrs['parameters'] = parameters
write_tbl = self.table[start_index:stop_index]
for p in parameters:
f[p] = write_tbl[p]
if write_compressed_waveforms and self.has_compressed_waveforms:
for tmplt_hash in write_tbl.template_hash:
compressed_waveform = pycbc.waveform.compress.CompressedWaveform.from_hdf(
self.filehandler, tmplt_hash,
load_now=True)
compressed_waveform.write_to_hdf(f, tmplt_hash)
return f
def end_frequency(self, index):
""" Return the end frequency of the waveform at the given index value
"""
if hasattr(self.table[index], 'f_final'):
return self.table[index].f_final
return pycbc.waveform.get_waveform_end_frequency(
self.table[index],
approximant=self.approximant(index),
**self.extra_args)
    def parse_approximant(self, approximant):
        """Parses the given approximant argument, returning the approximant to
        use for each template in self. This is done by calling
        `parse_approximant_arg` using self's table as the array; see that
        function for more details.

        Parameters
        ----------
        approximant : str
            The approximant specification to resolve per template.
        """
        return parse_approximant_arg(approximant, self.table)
def approximant(self, index):
""" Return the name of the approximant ot use at the given index
"""
if 'approximant' not in self.table.fieldnames:
raise ValueError("approximant not found in input file and no "
"approximant was specified on initialization")
apx = self.table["approximant"][index]
if hasattr(apx, 'decode'):
apx = apx.decode()
return apx
    def __len__(self):
        """Return the number of templates in the bank."""
        return len(self.table)
def template_thinning(self, inj_filter_rejector):
"""Remove templates from bank that are far from all injections."""
if not inj_filter_rejector.enabled or \
inj_filter_rejector.chirp_time_window is None:
# Do nothing!
return
injection_parameters = inj_filter_rejector.injection_params.table
fref = inj_filter_rejector.f_lower
threshold = inj_filter_rejector.chirp_time_window
m1= self.table['mass1']
m2= self.table['mass2']
tau0_temp, _ = pycbc.pnutils.mass1_mass2_to_tau0_tau3(m1, m2, fref)
indices = []
sort = tau0_temp.argsort()
tau0_temp = tau0_temp[sort]
for inj in injection_parameters:
tau0_inj, _ = \
pycbc.pnutils.mass1_mass2_to_tau0_tau3(inj.mass1, inj.mass2,
fref)
lid = np.searchsorted(tau0_temp, tau0_inj - threshold)
rid = np.searchsorted(tau0_temp, tau0_inj + threshold)
inj_indices = sort[lid:rid]
indices.append(inj_indices)
indices_combined = np.concatenate(indices)
indices_unique= np.unique(indices_combined)
self.table = self.table[indices_unique]
def ensure_standard_filter_columns(self, low_frequency_cutoff=None):
""" Initialize FilterBank common fields
Parameters
----------
low_frequency_cutoff: {float, None}, Optional
A low frequency cutoff which overrides any given within the
template bank file.
"""
# Make sure we have a template duration field
if not hasattr(self.table, 'template_duration'):
self.table = self.table.add_fields(np.zeros(len(self.table),
dtype=np.float32), 'template_duration')
# Make sure we have a f_lower field
if low_frequency_cutoff is not None:
if not hasattr(self.table, 'f_lower'):
vec = np.zeros(len(self.table), dtype=np.float32)
self.table = self.table.add_fields(vec, 'f_lower')
self.table['f_lower'][:] = low_frequency_cutoff
self.min_f_lower = min(self.table['f_lower'])
if self.f_lower is None and self.min_f_lower == 0.:
raise ValueError('Invalid low-frequency cutoff settings')
class LiveFilterBank(TemplateBank):
    """Template bank specialized for low-latency filtering.

    Waveform buffer lengths are rounded up in discrete ``increment``
    steps so filter lengths can be shared between templates, and templates
    can be looked up by their (mass1, mass2, spin1z, spin2z) tuple.
    """
    def __init__(self, filename, sample_rate, minimum_buffer,
                 approximant=None, increment=8, parameters=None,
                 low_frequency_cutoff=None,
                 **kwds):
        # increment: granularity (in seconds) of the waveform-length rounding.
        self.increment = increment
        self.filename = filename
        self.sample_rate = sample_rate
        # minimum_buffer: minimum padding (seconds) added to each waveform.
        self.minimum_buffer = minimum_buffer
        self.f_lower = low_frequency_cutoff
        super(LiveFilterBank, self).__init__(filename, approximant=approximant,
                                             parameters=parameters, **kwds)
        self.ensure_standard_filter_columns(low_frequency_cutoff=low_frequency_cutoff)
        # Map (mass1, mass2, spin1z, spin2z) -> row index for O(1) lookup;
        # duplicate tuples would make templates indistinguishable.
        self.param_lookup = {}
        for i, p in enumerate(self.table):
            key = (p.mass1, p.mass2, p.spin1z, p.spin2z)
            assert(key not in self.param_lookup) # Uh, oh, template confusion!
            self.param_lookup[key] = i
    def round_up(self, num):
        """Determine the length to use for this waveform by rounding.

        Parameters
        ----------
        num : int
            Proposed size of waveform in seconds

        Returns
        -------
        size: int
            The rounded size to use for the waveform buffer in seconds. This
            is calculated using an internal `increment` attribute, which
            determines the discreteness of the rounding.
        """
        inc = self.increment
        size = np.ceil(num / self.sample_rate / inc) * self.sample_rate * inc
        return size
    def getslice(self, sindex):
        """Return a shallow copy of self restricted to the given slice."""
        instance = copy(self)
        instance.table = self.table[sindex]
        return instance
    def id_from_param(self, param_tuple):
        """Get the index of this template based on its param tuple

        Parameters
        ----------
        param_tuple : tuple
            Tuple of the parameters which uniquely identify this template

        Returns
        --------
        index : int
            The ordered index that this template has in the template bank.
        """
        return self.param_lookup[param_tuple]
    def __getitem__(self, index):
        """Return a sliced copy (for slices) or a generated template."""
        if isinstance(index, slice):
            return self.getslice(index)
        return self.get_template(index)
    def get_template(self, index, min_buffer=None):
        """Generate the frequency-domain filter for template ``index``.

        The buffer length is the waveform duration plus ``min_buffer``
        (default: self.minimum_buffer) plus 0.5 s, rounded via round_up.
        """
        approximant = self.approximant(index)
        f_end = self.end_frequency(index)
        flow = self.table[index].f_lower
        # Determine the length of time of the filter, rounded up to the
        # next multiple of `increment` seconds (see round_up).
        if min_buffer is None:
            min_buffer = self.minimum_buffer
        min_buffer += 0.5
        from pycbc.waveform.waveform import props
        p = props(self.table[index])
        p.pop('approximant')
        buff_size = pycbc.waveform.get_waveform_filter_length_in_time(approximant, **p)
        tlen = self.round_up((buff_size + min_buffer) * self.sample_rate)
        flen = int(tlen / 2 + 1)
        delta_f = self.sample_rate / float(tlen)
        # Clamp the end frequency to within the filter's band.
        if f_end is None or f_end >= (flen * delta_f):
            f_end = (flen-1) * delta_f
        logging.info("Generating %s, %ss, %i, starting from %s Hz",
                     approximant, 1.0/delta_f, index, flow)
        # Get the waveform filter
        distance = 1.0 / DYN_RANGE_FAC
        htilde = pycbc.waveform.get_waveform_filter(
            zeros(flen, dtype=np.complex64), self.table[index],
            approximant=approximant, f_lower=flow, f_final=f_end,
            delta_f=delta_f, delta_t=1.0/self.sample_rate, distance=distance,
            **self.extra_args)
        # If available, record the total duration (which may
        # include ringdown) and the duration up to merger since they will be
        # erased by the type conversion below.
        ttotal = template_duration = -1
        time_offset = None
        if hasattr(htilde, 'length_in_time'):
            ttotal = htilde.length_in_time
        if hasattr(htilde, 'chirp_length'):
            template_duration = htilde.chirp_length
        if hasattr(htilde, 'time_offset'):
            time_offset = htilde.time_offset
        self.table[index].template_duration = template_duration
        htilde = htilde.astype(np.complex64)
        # Re-attach metadata lost in the astype() conversion above.
        htilde.f_lower = flow
        htilde.min_f_lower = self.min_f_lower
        htilde.end_idx = int(f_end / htilde.delta_f)
        htilde.params = self.table[index]
        htilde.chirp_length = template_duration
        htilde.length_in_time = ttotal
        htilde.approximant = approximant
        htilde.end_frequency = f_end
        if time_offset:
            htilde.time_offset = time_offset
        # Add sigmasq as a method of this instance
        htilde.sigmasq = types.MethodType(sigma_cached, htilde)
        htilde.id = self.id_from_param((htilde.params.mass1,
                                        htilde.params.mass2,
                                        htilde.params.spin1z,
                                        htilde.params.spin2z))
        return htilde
class FilterBank(TemplateBank):
    """Template bank that generates fixed-length frequency-domain filters.

    Every template is generated at the same ``delta_f`` and
    ``filter_length``; when the bank file contains compressed waveforms
    they are decompressed instead of regenerated from scratch.
    """
    def __init__(self, filename, filter_length, delta_f, dtype,
                 out=None, max_template_length=None,
                 approximant=None, parameters=None,
                 enable_compressed_waveforms=True,
                 low_frequency_cutoff=None,
                 waveform_decompression_method=None,
                 **kwds):
        # Optional pre-allocated output memory reused for every template.
        self.out = out
        self.dtype = dtype
        self.f_lower = low_frequency_cutoff
        self.filename = filename
        self.delta_f = delta_f
        # Number of time-domain samples implied by the filter length.
        self.N = (filter_length - 1 ) * 2
        self.delta_t = 1.0 / (self.N * self.delta_f)
        self.filter_length = filter_length
        self.max_template_length = max_template_length
        self.enable_compressed_waveforms = enable_compressed_waveforms
        self.waveform_decompression_method = waveform_decompression_method
        super(FilterBank, self).__init__(filename, approximant=approximant,
                                         parameters=parameters, **kwds)
        self.ensure_standard_filter_columns(low_frequency_cutoff=low_frequency_cutoff)
    def get_decompressed_waveform(self, tempout, index, f_lower=None,
                                  approximant=None, df=None):
        """Returns a frequency domain decompressed waveform for the template
        in the bank corresponding to the index taken in as an argument. The
        decompressed waveform is obtained by interpolating in frequency space,
        the amplitude and phase points for the compressed template that are
        read in from the bank.

        Parameters
        ----------
        tempout : array
            Scratch memory the decompressed waveform is written into.
        index : int
            Index of the template within the bank.
        f_lower : {None, float}
            Low-frequency cutoff passed to the decompression.
        approximant : {None, str}
            Approximant used to estimate the duration when the bank
            stores none.
        df : {None, float}
            Frequency resolution; defaults to the bank's ``delta_f``.
        """
        from pycbc.waveform.waveform import props
        from pycbc.waveform import get_waveform_filter_length_in_time
        # Get the template hash corresponding to the template index taken in as argument
        tmplt_hash = self.table.template_hash[index]
        # Read the compressed waveform from the bank file
        compressed_waveform = pycbc.waveform.compress.CompressedWaveform.from_hdf(
            self.filehandler, tmplt_hash,
            load_now=True)
        # Get the interpolation method to be used to decompress the waveform
        if self.waveform_decompression_method is not None :
            decompression_method = self.waveform_decompression_method
        else :
            decompression_method = compressed_waveform.interpolation
        logging.info("Decompressing waveform using %s", decompression_method)
        if df is not None :
            delta_f = df
        else :
            delta_f = self.delta_f
        # Create memory space for writing the decompressed waveform
        decomp_scratch = FrequencySeries(tempout[0:self.filter_length], delta_f=delta_f, copy=False)
        # Get the decompressed waveform
        hdecomp = compressed_waveform.decompress(out=decomp_scratch, f_lower=f_lower, interpolation=decompression_method)
        p = props(self.table[index])
        p.pop('approximant')
        try:
            tmpltdur = self.table[index].template_duration
        except AttributeError:
            tmpltdur = None
        # Fall back to an estimated duration when none is stored.
        if tmpltdur is None or tmpltdur==0.0 :
            tmpltdur = get_waveform_filter_length_in_time(approximant, **p)
        hdecomp.chirp_length = tmpltdur
        hdecomp.length_in_time = hdecomp.chirp_length
        return hdecomp
    def generate_with_delta_f_and_max_freq(self, t_num, max_freq, delta_f,
                                           low_frequency_cutoff=None,
                                           cached_mem=None):
        """Generate the template with index t_num using custom length.

        The filter is generated at the requested ``delta_f`` and up to
        ``max_freq`` rather than at the bank's fixed resolution.
        """
        approximant = self.approximant(t_num)
        # Don't want to use INTERP waveforms in here
        if approximant.endswith('_INTERP'):
            approximant = approximant.replace('_INTERP', '')
        # Using SPAtmplt here is bad as the stored cbrt and logv get
        # recalculated as we change delta_f values. Fall back to TaylorF2
        # in lalsimulation.
        if approximant == 'SPAtmplt':
            approximant = 'TaylorF2'
        if cached_mem is None:
            wav_len = int(max_freq / delta_f) + 1
            cached_mem = zeros(wav_len, dtype=np.complex64)
        if self.has_compressed_waveforms and self.enable_compressed_waveforms:
            htilde = self.get_decompressed_waveform(cached_mem, t_num,
                                                    f_lower=low_frequency_cutoff,
                                                    approximant=approximant,
                                                    df=delta_f)
        else :
            htilde = pycbc.waveform.get_waveform_filter(
                cached_mem, self.table[t_num], approximant=approximant,
                f_lower=low_frequency_cutoff, f_final=max_freq, delta_f=delta_f,
                distance=1./DYN_RANGE_FAC, delta_t=1./(2.*max_freq))
        return htilde
    def __getitem__(self, index):
        """Generate and return the filter for template ``index``."""
        # Make new memory for templates if we aren't given output memory
        if self.out is None:
            tempout = zeros(self.filter_length, dtype=self.dtype)
        else:
            tempout = self.out
        approximant = self.approximant(index)
        f_end = self.end_frequency(index)
        if f_end is None or f_end >= (self.filter_length * self.delta_f):
            f_end = (self.filter_length-1) * self.delta_f
        # Find the start frequency, if variable
        f_low = find_variable_start_frequency(approximant,
                                              self.table[index],
                                              self.f_lower,
                                              self.max_template_length)
        logging.info('%s: generating %s from %s Hz' % (index, approximant, f_low))
        # Clear the storage memory
        poke = tempout.data # pylint:disable=unused-variable
        tempout.clear()
        # Get the waveform filter
        distance = 1.0 / DYN_RANGE_FAC
        if self.has_compressed_waveforms and self.enable_compressed_waveforms:
            htilde = self.get_decompressed_waveform(tempout, index, f_lower=f_low,
                                                    approximant=approximant, df=None)
        else :
            htilde = pycbc.waveform.get_waveform_filter(
                tempout[0:self.filter_length], self.table[index],
                approximant=approximant, f_lower=f_low, f_final=f_end,
                delta_f=self.delta_f, delta_t=self.delta_t, distance=distance,
                **self.extra_args)
        # If available, record the total duration (which may
        # include ringdown) and the duration up to merger since they will be
        # erased by the type conversion below.
        ttotal = template_duration = None
        if hasattr(htilde, 'length_in_time'):
            ttotal = htilde.length_in_time
        if hasattr(htilde, 'chirp_length'):
            template_duration = htilde.chirp_length
        self.table[index].template_duration = template_duration
        htilde = htilde.astype(self.dtype)
        # Re-attach metadata lost in the astype() conversion above.
        htilde.f_lower = f_low
        htilde.min_f_lower = self.min_f_lower
        htilde.end_idx = int(f_end / htilde.delta_f)
        htilde.params = self.table[index]
        htilde.chirp_length = template_duration
        htilde.length_in_time = ttotal
        htilde.approximant = approximant
        htilde.end_frequency = f_end
        # Add sigmasq as a method of this instance
        htilde.sigmasq = types.MethodType(sigma_cached, htilde)
        htilde._sigmasq = {}
        return htilde
def find_variable_start_frequency(approximant, parameters, f_start, max_length,
                                  delta_f=1):
    """ Find a frequency value above the starting frequency that results in a
    waveform shorter than max_length.

    If ``f_start`` is None the template's own ``f_lower`` is returned; if
    ``max_length`` is None, ``f_start`` is returned unchanged. Otherwise
    the frequency is raised in ``delta_f`` steps until the estimated
    waveform duration is at most ``max_length``.
    """
    if f_start is None:
        return parameters.f_lower
    if max_length is None:
        return f_start
    frequency = f_start
    duration = pycbc.waveform.get_waveform_filter_length_in_time(
        approximant, parameters, f_lower=frequency)
    while duration > max_length:
        frequency += delta_f
        duration = pycbc.waveform.get_waveform_filter_length_in_time(
            approximant, parameters, f_lower=frequency)
    return frequency
class FilterBankSkyMax(TemplateBank):
    """Template bank returning both polarizations of each template.

    Like FilterBank, but indexing yields the (hplus, hcross)
    frequency-domain filter pair rather than a single filter.
    """
    def __init__(self, filename, filter_length, delta_f,
                 dtype, out_plus=None, out_cross=None,
                 max_template_length=None, parameters=None,
                 low_frequency_cutoff=None, **kwds):
        # Optional pre-allocated output buffers for the two polarizations.
        self.out_plus = out_plus
        self.out_cross = out_cross
        self.dtype = dtype
        self.f_lower = low_frequency_cutoff
        self.filename = filename
        self.delta_f = delta_f
        # Number of time-domain samples implied by the filter length.
        self.N = (filter_length - 1 ) * 2
        self.delta_t = 1.0 / (self.N * self.delta_f)
        self.filter_length = filter_length
        self.max_template_length = max_template_length
        super(FilterBankSkyMax, self).__init__(filename, parameters=parameters,
                                               **kwds)
        self.ensure_standard_filter_columns(low_frequency_cutoff=low_frequency_cutoff)
    def __getitem__(self, index):
        """Generate and return (hplus, hcross) for template ``index``."""
        # Make new memory for templates if we aren't given output memory
        if self.out_plus is None:
            tempoutplus = zeros(self.filter_length, dtype=self.dtype)
        else:
            tempoutplus = self.out_plus
        if self.out_cross is None:
            tempoutcross = zeros(self.filter_length, dtype=self.dtype)
        else:
            tempoutcross = self.out_cross
        approximant = self.approximant(index)
        # Get the end of the waveform if applicable (only for SPAtmplt atm)
        f_end = self.end_frequency(index)
        if f_end is None or f_end >= (self.filter_length * self.delta_f):
            f_end = (self.filter_length-1) * self.delta_f
        # Find the start frequency, if variable
        f_low = find_variable_start_frequency(approximant,
                                              self.table[index],
                                              self.f_lower,
                                              self.max_template_length)
        logging.info('%s: generating %s from %s Hz', index, approximant, f_low)
        # NOTE(review): accessing .data appears to force instantiation of
        # the underlying memory before it is cleared below — confirm.
        poke1 = tempoutplus.data # pylint:disable=unused-variable
        poke2 = tempoutcross.data # pylint:disable=unused-variable
        # Clear the storage memory
        tempoutplus.clear()
        tempoutcross.clear()
        # Get the waveform filter
        distance = 1.0 / DYN_RANGE_FAC
        hplus, hcross = pycbc.waveform.get_two_pol_waveform_filter(
            tempoutplus[0:self.filter_length],
            tempoutcross[0:self.filter_length], self.table[index],
            approximant=approximant, f_lower=f_low,
            f_final=f_end, delta_f=self.delta_f, delta_t=self.delta_t,
            distance=distance, **self.extra_args)
        if hasattr(hplus, 'chirp_length') and hplus.chirp_length is not None:
            self.table[index].template_duration = hplus.chirp_length
        hplus = hplus.astype(self.dtype)
        hcross = hcross.astype(self.dtype)
        # Re-attach metadata lost in the astype() conversions above.
        hplus.f_lower = f_low
        hcross.f_lower = f_low
        hplus.min_f_lower = self.min_f_lower
        hcross.min_f_lower = self.min_f_lower
        hplus.end_frequency = f_end
        hcross.end_frequency = f_end
        hplus.end_idx = int(hplus.end_frequency / hplus.delta_f)
        # Both polarizations share f_end and delta_f, so reusing hplus's
        # values here yields the same index for hcross.
        hcross.end_idx = int(hplus.end_frequency / hplus.delta_f)
        hplus.params = self.table[index]
        hcross.params = self.table[index]
        hplus.approximant = approximant
        hcross.approximant = approximant
        # Add sigmasq as a method of this instance
        hplus.sigmasq = types.MethodType(sigma_cached, hplus)
        hplus._sigmasq = {}
        hcross.sigmasq = types.MethodType(sigma_cached, hcross)
        hcross._sigmasq = {}
        return hplus, hcross
# Public names exported by this module.
__all__ = ('sigma_cached', 'boolargs_from_apprxstr', 'add_approximant_arg',
           'parse_approximant_arg', 'tuple_to_hash', 'TemplateBank',
           'LiveFilterBank', 'FilterBank', 'find_variable_start_frequency',
           'FilterBankSkyMax')
|
"""
scraping
the utility functions for the actual web scraping
"""
import ssl
import datetime
import requests
import re
# Endpoint returning a single store's JSON record given ?storeNumber=N.
QUERY_URL = "https://www.wawa.com/Handlers/LocationByStoreNumber.ashx"
# Store-number "series" observed to be in use (see get_wawa_data docstring);
# numbers outside these ranges are expected to 404.
POSSIBLE_STORE_NUMS = list(range(5000, 6000))
POSSIBLE_STORE_NUMS.extend(list(range(0, 1000)))
POSSIBLE_STORE_NUMS.extend(list(range(8000, 9000)))
# Fuel grades whose prices are reported.
GAS_TYPES = ["diesel", "plus", "unleaded", "premium"]
def parse_gas_prices(in_location, gas_types=("diesel", "plus", "unleaded", "premium")):
    """
    Breaks open the json for the gas prices.

    Improvements over the original: every grade is always present in the
    output (previously grades not sold were silently missing, so the dict
    shape varied between locations), the try/except only guards the
    "fuelTypes" lookup (previously a malformed fuel entry blanked all
    prices), and the grade list is a parameter for reuse.

    :param in_location: The Wawa location we are looking at (dict)
    :param gas_types: Fuel grades to report (iterable of str, optional)
    :return: The gas price info; one "<grade>_price" key per grade, ""
        when that grade is not sold (dict)
    """
    # Start every grade at "" so the output schema is identical whether or
    # not the location sells gas.
    out_data = {gt + "_price": "" for gt in gas_types}
    try:
        fuel_data = in_location["fuelTypes"]
    # no gas sold at this Wawa
    except KeyError:
        return out_data
    for ft in fuel_data:
        lowered = ft.get("description", "").lower()
        if lowered in gas_types:
            out_data[lowered + "_price"] = ft.get("price", "")
    return out_data
def camel_to_underscore(in_string):
    """
    Convert a camel-cased word into its underscore-separated equivalent.

    :param in_string: The camel-cased string (str)
    :return: The underscore'd string (str)
    """
    # First split an uppercase-then-lowercase run off the character before
    # it, then split remaining lower/digit-to-upper boundaries, and lower.
    partially_split = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', in_string)
    fully_split = re.sub('([a-z0-9])([A-Z])', r'\1_\2', partially_split)
    return fully_split.lower()
def parse_amenities(in_location):
    """
    Breaks open the json for the amenities offered at the Wawa location.

    :param in_location: The Wawa location (dict)
    :return: The amenity info, keyed "has_<amenity>" (dict)
    """
    return {
        "has_" + camel_to_underscore(name).lower(): offered
        for name, offered in in_location["amenities"].items()
    }
def get_addresses(in_location):
    """
    Parses info for the Wawa address and coordinates.

    :param in_location: The Wawa location (dict)
    :return: The address and coordinate info (dict)
    """
    # Entry 0 holds the human-friendly address, entry 1 the coordinates.
    friendly = in_location["addresses"][0]
    physical = in_location["addresses"][1]
    out_friendly = {field: friendly[field]
                    for field in ("address", "city", "state", "zip")}
    latitude = physical["loc"][0]
    longitude = physical["loc"][1]
    return {
        "address": out_friendly,
        "coordinates": {"longitude": longitude, "latitude": latitude},
    }
def get_wawa_data(limit=None):
    """
    Hits the store number url endpoint to pull down Wawa locations and
    parse each one's information. We don't know the store numbers as there
    is no list of store numbers. Through testing I was able to narrow down
    "series" of store numbers, so we iterate through ranges of possible
    store numbers, skipping any 404 errors (invalid store id responses
    returned by url calls).
    :param limit: A cap on the number of Wawa results returned (int) (optional)
    :return: Parsed Wawa information (list<dict>)
    """
    # NOTE(review): this disables certificate verification for every https
    # request made by this process afterwards — confirm it is required.
    ssl._create_default_https_context = ssl._create_unverified_context
    output = []
    for i in POSSIBLE_STORE_NUMS:
        response = requests.get(QUERY_URL, params={"storeNumber": i})
        # NOTE(review): only 404 is treated as "no such store"; any other
        # error status falls through and will likely raise in .json().
        if response.status_code != 404:
            location = response.json()
            geographic_data = get_addresses(location)
            address = geographic_data["address"]
            coordinates = geographic_data["coordinates"]
            gas_prices = parse_gas_prices(location)
            amenities = parse_amenities(location)
            # Fields copied straight from the endpoint's JSON payload.
            this_location_output = {
                "has_menu": location["hasMenu"],
                "last_updated": datetime.datetime.strptime(location["lastUpdated"], "%m/%d/%Y %I:%M %p"),
                "location_id": location["locationID"],
                "open_24_hours": location["open24Hours"],
                "regional_director": location["regionalDirector"],
                "store_close": location["storeClose"],
                "store_name": location["storeName"],
                "store_number": location["storeNumber"],
                "store_open": location["storeOpen"],
                "telephone": location["telephone"]
            }
            # Merge the parsed sub-dictionaries into one flat record.
            this_location_output = {**this_location_output, **address}
            this_location_output = {**this_location_output, **coordinates}
            this_location_output = {**this_location_output, **gas_prices}
            this_location_output = {**this_location_output, **amenities}
            output.append(this_location_output)
            if limit and len(output) == limit:
                break
    return output
|
__docformat__ = "epytext"
import sys
import os
import random
import tempfile
from datetime import date
class GenName():
    """
    @authors: Brigitte Bigi
    @contact: brigitte.bigi@gmail.com
    @license: GPL
    @summary: A class to generate a random file name for a non-existing file.
    """
    def __init__(self, extension=""):
        """Generate names until one names a file that does not exist."""
        # Seed with a path that exists so the loop runs at least once
        # (the original compared `== True`, which is redundant).
        self.name = "/"
        while os.path.exists(self.name):
            self.set_name(extension)

    def set_name(self, extension):
        """
        Set a new candidate file name.

        The name is built from today's date, the process id and a random
        integer, plus the requested extension; it is relative to the
        current working directory (the original comment wrongly claimed a
        path prefix was included).
        """
        # random integer in [0, 9999]
        randval = str(int(random.random() * 10000))
        # process pid, so concurrent processes do not collide
        pid = str(os.getpid())
        # today's date
        today = str(date.today())
        filename = "tmp_" + today + "_" + pid + "_" + randval
        self.name = filename + extension

    def get_name(self):
        """
        Get the current file name.
        """
        return str(self.name)
if __name__ == "__main__":
print GenName().get_name()
|
from netzob.Common.Type.Endianess import Endianess
from common.NetzobTestCase import NetzobTestCase
class test_Endianess(NetzobTestCase):
    """Check the string constants exposed by the Endianess enumeration."""
    def test_BIG(self):
        """Endianess.BIG must equal the literal "big-endian"."""
        self.assertEqual(Endianess.BIG, "big-endian")
    def test_LITTLE(self):
        """Endianess.LITTLE must equal the literal "little-endian"."""
        self.assertEqual(Endianess.LITTLE, "little-endian")
|
"""
Copyright (C) 2013 Stanislav Bobovych
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
class RDBDATA_data_entry:
    """One data entry in an RDBDATA resource file.

    Layout at ``offset``: four little-endian uint32 values (data type,
    RDB id, data length, unknown) followed by ``data_length`` bytes of
    payload. NOTE(review): this module must import ``struct`` at file
    level — no import is visible in this chunk.
    """
    def __init__(self, offset, file_pointer):
        """Parse the entry at ``offset``; the file position is restored."""
        old_offset = file_pointer.tell()
        file_pointer.seek(offset)
        self.data_type, = struct.unpack("<I", file_pointer.read(4))
        # BUG FIX: the original line had a doubled '=' (", = =") which is
        # a syntax error.
        self.RDB_id, = struct.unpack("<I", file_pointer.read(4))
        self.data_length, = struct.unpack("<I", file_pointer.read(4))
        self.unknown, = struct.unpack("<I", file_pointer.read(4))
        self.data = file_pointer.read(self.data_length)
        file_pointer.seek(old_offset)
class RDBDATA_file:
    """Reader for an RDBDATA container file.

    NOTE(review): this module must import ``struct`` and ``os`` at file
    level — no imports are visible in this chunk.
    """
    def __init__(self, filepath=None):
        self.filepath = filepath
        self.header = None  # 16-byte header; first field expected 'RDB0'
        self.data = None
        if self.filepath is not None:
            self.open(filepath)
    def open(self, filepath=None):
        """Record the file path to read from; does not touch the disk."""
        if filepath is None and self.filepath is None:
            # BUG FIX: py2 print statement -> print() function.
            print("File path is empty")
            return
        if self.filepath is None:
            self.filepath = filepath
    def dump(self, dest_filepath=os.getcwd(), verbose=False):
        """Read the header and the remaining payload into memory.

        ``dest_filepath`` and ``verbose`` are currently unused; kept for
        interface compatibility.
        """
        with open(self.filepath, "rb") as f:
            # BUG FIX: "IIII" unpacks struct.calcsize("IIII") (16) bytes,
            # but the original read only 4, which raises struct.error.
            self.header = struct.unpack("IIII", f.read(struct.calcsize("IIII")))
            self.data = f.read()
|
from django.contrib.auth.models import User, Group, Permission
from django.core.exceptions import ValidationError
from django.core.management import call_command
from django.test import TestCase
from django.utils.encoding import force_text
from weblate.lang.models import Language
from weblate.trans.models import Project, Translation, Comment
from weblate.permissions.data import DEFAULT_GROUPS, ADMIN_PERMS
from weblate.permissions.models import AutoGroup, GroupACL
from weblate.permissions.helpers import (
has_group_perm, can_delete_comment, can_edit, can_author_translation,
)
from weblate.trans.tests.test_models import ModelTestCase
class PermissionsTest(TestCase):
    """Tests of project-level permission checks and per-user ACL caching."""
    def setUp(self):
        """Create a plain user and an owner attached to a fresh project."""
        self.user = User.objects.create_user(
            'user', 'test@example.com', 'x'
        )
        self.owner = User.objects.create_user(
            'owner', 'owner@example.com', 'x'
        )
        self.project = Project.objects.create(slug='test')
        # Owners gain their rights via the @Administration group.
        self.project.add_user(self.owner, '@Administration')
    def test_owner_owned(self):
        """The owner has author_translation on the owned project."""
        self.assertTrue(
            has_group_perm(
                self.owner, 'trans.author_translation', project=self.project
            )
        )
    def test_owner_no_perm(self):
        """Ownership does not grant permissions outside the owner group."""
        self.assertFalse(
            has_group_perm(
                self.owner, 'trans.delete_project', project=self.project
            )
        )
    def test_owner_user(self):
        """A plain user lacks author_translation on the project."""
        self.assertFalse(
            has_group_perm(
                self.user, 'trans.author_translation', project=self.project
            )
        )
    def test_check_owner(self):
        """has_group_perm grants the owner author_translation."""
        self.assertTrue(
            has_group_perm(
                self.owner, 'trans.author_translation', project=self.project
            )
        )
    def test_check_user(self):
        """has_group_perm denies a plain user author_translation."""
        self.assertFalse(
            has_group_perm(
                self.user, 'trans.author_translation', project=self.project
            )
        )
    def test_delete_comment_owner(self):
        """The owner may delete a comment on the project."""
        comment = Comment(project=self.project)
        self.assertTrue(can_delete_comment(self.owner, comment))
    def test_delete_comment_user(self):
        """A plain user may not delete a comment on the project."""
        comment = Comment(project=self.project)
        self.assertFalse(can_delete_comment(self.user, comment))
    def test_cache(self):
        """Results are memoized on ``user.acl_permissions_cache``."""
        comment = Comment(project=self.project)
        key = ('_can_delete_comment', self.project.get_full_slug())
        # The cache only appears after the first check.
        self.assertTrue(not hasattr(self.user, 'acl_permissions_cache'))
        self.assertFalse(can_delete_comment(self.user, comment))
        self.assertFalse(self.user.acl_permissions_cache[key])
        # Poisoning the cache flips the answer, proving it is consulted.
        self.user.acl_permissions_cache[key] = True
        self.assertTrue(can_delete_comment(self.user, comment))
    def test_default_groups(self):
        """Check consistency of default permissions.

        - The admin permissions have to contain all used permissions
        """
        for group in DEFAULT_GROUPS:
            self.assertEqual(
                DEFAULT_GROUPS[group] - ADMIN_PERMS,
                set()
            )
class GroupACLTest(ModelTestCase):
PERMISSION = "trans.save_translation"
    def setUp(self):
        """Create two users, a permission-bearing group and a translation.

        ``privileged`` is a member of ``testgroup``, which carries the
        save_translation permission; ``user`` belongs to no groups.
        """
        super(GroupACLTest, self).setUp()
        self.user = User.objects.create_user(
            "user", 'test@example.com', 'x'
        )
        self.privileged = User.objects.create_user(
            "privileged", 'other@example.com', 'x'
        )
        self.group = Group.objects.create(name="testgroup")
        self.project = self.subproject.project
        # Start from a clean slate: drop any auto-created translations.
        self.subproject.translation_set.all().delete()
        self.language = Language.objects.get_default()
        self.trans = Translation.objects.create(
            subproject=self.subproject, language=self.language,
            filename="this/is/not/a.template"
        )
        # Resolve "app.codename" into a Permission object.
        app, perm = self.PERMISSION.split('.')
        self.permission = Permission.objects.get(
            codename=perm, content_type__app_label=app
        )
        self.group.permissions.add(self.permission)
        self.privileged.groups.add(self.group)
    def test_acl_lockout(self):
        """Basic sanity check.

        Group ACL set on a subproject should only allow members of
        the marked group to edit it.
        """
        # Before any ACL exists, both users may edit.
        self.assertTrue(can_edit(self.user, self.trans, self.PERMISSION))
        self.assertTrue(can_edit(self.privileged, self.trans, self.PERMISSION))
        acl = GroupACL.objects.create(subproject=self.subproject)
        acl.groups.add(self.group)
        self.clear_permission_cache()
        # After the ACL, only members of the locked group may edit.
        self.assertTrue(can_edit(self.privileged, self.trans, self.PERMISSION))
        self.assertFalse(can_edit(self.user, self.trans, self.PERMISSION))
    def test_acl_overlap(self):
        """ACL overlap test.

        When two ACLs can apply to a translation object, only the most
        specific one should apply.
        """
        # Language-wide ACL that includes the privileged group.
        acl_lang = GroupACL.objects.create(language=self.language)
        acl_lang.groups.add(self.group)
        self.assertTrue(
            can_edit(self.privileged, self.trans, self.PERMISSION))
        # A more specific (subproject) ACL without the group locks out
        # even members of the broader language ACL.
        acl_sub = GroupACL.objects.create(subproject=self.subproject)
        self.clear_permission_cache()
        self.assertFalse(
            can_edit(self.privileged, self.trans, self.PERMISSION))
        # Adding the group to the specific ACL restores access.
        acl_sub.groups.add(self.group)
        self.clear_permission_cache()
        self.assertTrue(
            can_edit(self.privileged, self.trans, self.PERMISSION))
    def test_acl_str(self):
        """String representation reflects whichever scope fields are set."""
        acl = GroupACL()
        self.assertIn(
            'unspecified', force_text(acl)
        )
        acl.language = self.language
        self.assertIn(
            'language=English', force_text(acl)
        )
        acl.subproject = self.subproject
        self.assertIn(
            'subproject=Test/Test', force_text(acl)
        )
        acl.subproject = None
        acl.project = self.project
        self.assertIn(
            'project=Test', force_text(acl)
        )
    def test_acl_clean(self):
        """Validation and normalization of ACL scope fields.

        An ACL with no scope at all fails clean(); saving one with both
        project and subproject set clears the redundant project field.
        """
        acl = GroupACL()
        self.assertRaises(
            ValidationError,
            acl.clean
        )
        acl.project = self.project
        acl.subproject = self.subproject
        acl.save()
        self.assertIsNone(acl.project)
    def test_acl_project(self):
        """Basic sanity check for project-level actions.

        When a Group ACL is set for a project, and only for a project,
        it should apply to project-level actions on that project.
        """
        acl = GroupACL.objects.get(project=self.project)
        acl.groups.add(self.group)
        permission = Permission.objects.get(
            codename='author_translation', content_type__app_label='trans'
        )
        # The permission must be granted both through the ACL and the group.
        acl.permissions.add(permission)
        self.group.permissions.add(permission)
        self.assertFalse(
            can_author_translation(self.user, self.project)
        )
        self.assertTrue(
            can_author_translation(self.privileged, self.project)
        )
    def test_affects_unrelated(self):
        """Unrelated objects test.

        If I set an ACL on an object, it should not affect objects
        that it doesn't match. (in this case, a different language)
        """
        lang_cs = Language.objects.get(code='cs')
        lang_de = Language.objects.get(code='de')
        trans_cs = Translation.objects.create(
            subproject=self.subproject, language=lang_cs,
            filename="this/is/not/a.template"
        )
        trans_de = Translation.objects.create(
            subproject=self.subproject, language=lang_de,
            filename="this/is/not/a.template"
        )
        # ACL scoped to Czech only.
        acl = GroupACL.objects.create(language=lang_cs)
        acl.groups.add(self.group)
        # Czech is locked down to the group; German is untouched.
        self.assertTrue(can_edit(self.privileged, trans_cs, self.PERMISSION))
        self.assertFalse(can_edit(self.user, trans_cs, self.PERMISSION))
        self.assertTrue(can_edit(self.privileged, trans_de, self.PERMISSION))
        self.assertTrue(can_edit(self.user, trans_de, self.PERMISSION))
def test_affects_partial_match(self):
"""Partial ACL match test.
If I set an ACL on two criteria, e.g., subproject and language,
it should not affect objects that only match one of the criteria.
"""
lang_cs = Language.objects.get(code='cs')
lang_de = Language.objects.get(code='de')
trans_cs = Translation.objects.create(
subproject=self.subproject, language=lang_cs,
filename="this/is/not/a.template"
)
trans_de = Translation.objects.create(
subproject=self.subproject, language=lang_de,
filename="this/is/not/a.template"
)
acl = GroupACL.objects.create(
language=lang_cs,
subproject=self.subproject
)
acl.groups.add(self.group)
self.assertTrue(can_edit(self.privileged, trans_cs, self.PERMISSION))
self.assertFalse(can_edit(self.user, trans_cs, self.PERMISSION))
self.assertTrue(can_edit(self.privileged, trans_de, self.PERMISSION))
self.assertTrue(can_edit(self.user, trans_de, self.PERMISSION))
def clear_permission_cache(self):
"""Clear permission cache.
This is necessary when testing interaction of the built-in permissions
mechanism and Group ACL. The built-in mechanism will cache results
of `has_perm` and friends, but these can be affected by the Group ACL
lockout. Usually the cache will get cleared on every page request,
but here we need to do it manually.
"""
attribs = (
'_perm_cache',
'_user_perm_cache',
'_group_perm_cache',
'acl_permissions_cache',
'acl_permissions_owner',
'acl_permissions_groups',
)
for cache in attribs:
for user in (self.user, self.privileged):
if hasattr(user, cache):
delattr(user, cache)
    def test_group_locked(self):
        """Limited privilege test.
        Once a group is used in a GroupACL, it is said to be "locked".
        Privileges from the locked group should not apply outside GroupACL.
        I.e., if I gain "author_translation" privilege through membership
        in a "privileged_group", applicable to Czech language, this should
        not apply to any other language.
        """
        lang_cs = Language.objects.get(code='cs')
        lang_de = Language.objects.get(code='de')
        trans_cs = Translation.objects.create(
            subproject=self.subproject, language=lang_cs,
            filename="this/is/not/a.template"
        )
        trans_de = Translation.objects.create(
            subproject=self.subproject, language=lang_de,
            filename="this/is/not/a.template"
        )
        perm_name = 'trans.author_translation'
        permission = Permission.objects.get(
            codename='author_translation', content_type__app_label='trans'
        )
        # Avoid conflict with automatic GroupACL
        self.project.groupacl_set.all()[0].permissions.remove(permission)
        # Stage 1: nobody holds the permission yet.
        self.assertFalse(can_edit(self.user, trans_cs, perm_name))
        self.assertFalse(can_edit(self.privileged, trans_cs, perm_name))
        self.assertFalse(can_edit(self.privileged, trans_de, perm_name))
        self.clear_permission_cache()
        # Stage 2: grant through group membership; the group is not yet
        # referenced by any GroupACL, so it applies to every language.
        self.group.permissions.add(permission)
        self.assertFalse(can_edit(self.user, trans_cs, perm_name))
        self.assertTrue(can_edit(self.privileged, trans_cs, perm_name))
        self.assertTrue(can_edit(self.privileged, trans_de, perm_name))
        self.clear_permission_cache()
        # Stage 3: lock the group to Czech; its privileges must stop
        # applying to German.
        acl = GroupACL.objects.create(language=lang_cs)
        acl.groups.add(self.group)
        self.assertTrue(can_edit(self.privileged, trans_cs, perm_name))
        self.assertFalse(can_edit(self.privileged, trans_de, perm_name))
    def test_project_specific(self):
        """Project specificity test.
        Project-level actions should only be affected by Group ACLs that
        are specific to the project, and don't have other criteria.
        E.g., if a GroupACL lists project+language, this should not give
        you project-level permissions.
        """
        permission = Permission.objects.get(
            codename='author_translation', content_type__app_label='trans'
        )
        self.group.permissions.add(permission)
        # ACL scoped to project AND language: too specific for
        # project-level checks, so it must not grant anything here.
        acl_project_lang = GroupACL.objects.create(
            language=self.language,
            project=self.project
        )
        acl_project_lang.groups.add(self.group)
        self.assertFalse(has_group_perm(
            self.privileged, 'trans.author_translation', project=self.project
        ))
        # ACL scoped to the project only: this one does apply.
        acl_project_only = GroupACL.objects.get(
            language=None,
            project=self.project,
        )
        acl_project_only.groups.add(self.group)
        self.clear_permission_cache()
        self.assertTrue(has_group_perm(
            self.privileged, 'trans.author_translation', project=self.project
        ))
    def test_acl_not_filtered(self):
        """Basic sanity check.
        A Group ACL only locks the permissions it actually carries: after
        the tested permission is removed from the ACL, both users keep
        edit access even though the subproject is covered by the ACL.
        """
        self.assertTrue(can_edit(self.user, self.trans, self.PERMISSION))
        self.assertTrue(can_edit(self.privileged, self.trans, self.PERMISSION))
        acl = GroupACL.objects.create(subproject=self.subproject)
        acl.groups.add(self.group)
        # Drop the permission from the ACL so the ACL no longer covers it.
        acl.permissions.remove(self.permission)
        self.clear_permission_cache()
        self.assertTrue(can_edit(self.privileged, self.trans, self.PERMISSION))
        self.assertTrue(can_edit(self.user, self.trans, self.PERMISSION))
class AutoGroupTest(TestCase):
    """Automatic group assignment on user creation."""

    @staticmethod
    def create_user():
        """Create a throwaway user with a weblate.org address."""
        return User.objects.create_user('test1', 'noreply@weblate.org', 'pass')

    def assert_group_count(self, expected):
        """Create a fresh user and verify how many groups it received."""
        self.assertEqual(self.create_user().groups.count(), expected)

    def test_default(self):
        # Default AutoGroup fixture assigns exactly one group.
        self.assert_group_count(1)

    def test_none(self):
        # Without any AutoGroup rules no group is assigned.
        AutoGroup.objects.all().delete()
        self.assert_group_count(0)

    def test_matching(self):
        # A rule matching the e-mail adds one extra group.
        AutoGroup.objects.create(
            match='^.*@weblate.org',
            group=Group.objects.get(name='Guests')
        )
        self.assert_group_count(2)

    def test_nonmatching(self):
        # A non-matching rule leaves the default assignment alone.
        AutoGroup.objects.create(
            match='^.*@example.net',
            group=Group.objects.get(name='Guests')
        )
        self.assert_group_count(1)
class CommandTest(TestCase):
    """Test for management commands."""

    def test_setupgroups(self):
        call_command('setupgroups')
        users_group = Group.objects.get(name='Users')
        has_save_perm = users_group.permissions.filter(
            codename='save_translation'
        ).exists()
        self.assertTrue(has_save_perm)
        # Re-run with move=True to exercise the user-migration code path.
        call_command('setupgroups', move=True)
|
"""
聚类和EM算法
~~~~~~~~~~~~~~~~
聚类
:copyright: (c) 2016 by the huaxz1986.
:license: lgpl-3.0, see LICENSE for more details.
"""
import numpy as np
import matplotlib.pyplot as plt
from sklearn.datasets.samples_generator import make_blobs
from chapters.Cluster_EM.gmm import test_GMM,test_GMM_cov_type,test_GMM_n_components
def create_data(centers, num=100, std=0.7):
    '''
    Generate a data set for clustering.

    :param centers: array of cluster centres; each generated sample has the
        same dimensionality as the centres (e.g. 2-D centres -> 2-D samples).
    :param num: number of samples
    :param std: standard deviation of the samples within each cluster
    :return: tuple ``(X, labels_true)``: the sample matrix and the
        ground-truth cluster label of every sample
    '''
    samples, ground_truth = make_blobs(
        n_samples=num, centers=centers, cluster_std=std)
    return samples, ground_truth
def plot_data(*data):
    '''
    Plot a clustering data set.

    :param data: variadic arguments forming a tuple: first element is the
        sample matrix, second element is the ground-truth cluster label of
        every sample
    :return: None
    '''
    X, labels_true = data
    fig = plt.figure()
    axis = fig.add_subplot(1, 1, 1)
    palette = 'rgbyckm'  # a distinct colour for each cluster
    for idx, label in enumerate(np.unique(labels_true)):
        mask = labels_true == label
        axis.scatter(X[mask, 0], X[mask, 1], label="cluster %d" % label,
                     color=palette[idx % len(palette)])
    axis.legend(loc="best", framealpha=0.5)
    axis.set_xlabel("X[0]")
    axis.set_ylabel("Y[1]")
    axis.set_title("data")
    plt.show()
if __name__=='__main__':
    centers=[[1,1],[2,2],[1,2],[10,20]]  # centres used to generate the clusters
    X,labels_true=create_data(centers,1000,0.5)  # generate the clustering data set
    # plot_data(X,labels_true) # plot the clustering data set
    # test_Kmeans(X,labels_true) # call the test_Kmeans function
    # test_Kmeans_nclusters(X,labels_true) # call the test_Kmeans_nclusters function
    # test_Kmeans_n_init(X,labels_true) # call the test_Kmeans_n_init function
    # test_DBSCAN(X,labels_true) # call the test_DBSCAN function
    # test_DBSCAN_epsilon(X,labels_true) # call the test_DBSCAN_epsilon function
    # test_DBSCAN_min_samples(X,labels_true) # call the test_DBSCAN_min_samples function
    # test_AgglomerativeClustering(X,labels_true) # call the test_AgglomerativeClustering function
    # test_AgglomerativeClustering_nclusters(X,labels_true) # call the test_AgglomerativeClustering_nclusters function
    # test_AgglomerativeClustering_linkage(X,labels_true) # call the test_AgglomerativeClustering_linkage function
    # test_GMM(X,labels_true) # call the test_GMM function
    # test_GMM_n_components(X,labels_true) # call the test_GMM_n_components function
    test_GMM_cov_type(X,labels_true)  # call the test_GMM_cov_type function
|
from scapy.all import *
from scapy.layers import dhcp6
from time import time
def duid(ll_addr):
    """Build a DUID-LLT (link-layer address plus time) for *ll_addr*."""
    timestamp = time()
    return DUID_LLT(lladdr=ll_addr, timeval=timestamp)
def ias(requested, iface, T1=None, T2=None):
    """Build an IA (Identity Association) reply for every requested IA.

    :param requested: iterable of IA options taken from the client request
    :param iface: interface whose global address is offered
    :param T1: renew timer override; ``None`` keeps the request's value
    :param T2: rebind timer override; ``None`` keeps the request's value
    :return: list of populated IA options
    """
    # List comprehension instead of map(): keeps the Python 2 behaviour
    # (a reusable list) also under Python 3, where map() is a single-use
    # iterator.
    return [__build_ia(request, iface, T1, T2) for request in requested]
def options(requested):
    """Build a DHCPv6 option instance for every requested option code.

    :param requested: iterable of numeric DHCPv6 option codes
    :return: list of option instances filled with test values
    """
    # List instead of map() so Python 3 callers also get a reusable list.
    return [__build_option_by_code(code) for code in requested]
def __build_ia(request, iface, T1=None, T2=None):
    """Answer a single IA option, echoing iaid and timers from *request*.

    ``T1``/``T2`` override the request's timers when given.  The previous
    ``cond and a or b`` idiom silently returned ``None`` whenever the
    request carried a falsy timer value (``T1 == 0`` is legal in DHCPv6);
    a conditional expression handles falsy values correctly.
    """
    ia = request.__class__(
        iaid=request.iaid,
        T1=(request.T1 if T1 is None else T1),
        T2=(request.T2 if T2 is None else T2),
    )
    # Offer the interface's global address with 5 minute lifetimes.
    ia.ianaopts.append(DHCP6OptIAAddress(addr=str(iface.global_ip()),
                                         preflft=300, validlft=300))
    return ia
def __build_option_by_code(code):
    """Instantiate the DHCPv6 option class for *code* and fill test values."""
    opt = __option_klass_by_code(code)()
    if isinstance(opt, DHCP6OptClientFQDN):
        # Advertise a fixed test hostname.
        opt.fqdn = 'testhost.local.'
    elif isinstance(opt, DHCP6OptDNSDomains):
        # Deliberately left empty: an empty domain search list suffices.
        pass
    elif isinstance(opt, DHCP6OptDNSServers):
        opt.dnsservers.append('2001:500:88:200::10')
    elif isinstance(opt, DHCP6OptSNTPServers):
        opt.sntpservers.append('2001:500:88:200::10')
    return opt
def __option_klass_by_code(code):
    """Map a numeric DHCPv6 option *code* to its scapy option class."""
    klass_name = dhcp6.dhcp6opts_by_code[code]
    return getattr(dhcp6, klass_name)
|
'''
@file freq_scale.py
@brief Sandbox for various frequency scale generators
@author gm
@copyright gm 2014
This file is part of Chartreuse
Chartreuse is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Chartreuse is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Chartreuse. If not, see <http://www.gnu.org/licenses/>.
'''
import numpy
import pylab
class LogFreqScale(object):
    '''
    Log frequency scale

    Generates ``length`` points, log2-spaced in kHz, spanning the usable
    band of a DFT of ``dft_length`` bins at ``sampling_freq`` Hz.
    '''
    def __init__(self, length, dft_length, sampling_freq):
        self.length = length
        self.dft_length = dft_length
        self.sampling_freq = sampling_freq
        self._Synthesize()

    def _Synthesize(self):
        '''
        Actual processing function for generating the scale
        '''
        low_bound = 2.0 * self.sampling_freq / self.dft_length
        high_bound = 0.5 * self.sampling_freq
        freqs = numpy.linspace(low_bound, high_bound, self.length)
        # First point is pinned below the first bin boundary.
        freqs[0] = self.sampling_freq / (self.dft_length * (3.0 / 4.0))
        # Store log2 of the frequency expressed in kHz.
        self.data = numpy.log2(freqs * 0.001)
if __name__ == "__main__":
    import utilities
    sampling_freq = 48000.0
    dft_bins_count = 2048
    low_edge = 62.5
    high_edge = 1500.0  # NOTE(review): currently unused below
    # Index of the first DFT bin at or above low_edge (numpy.ceil -> float).
    low_edge_idx = numpy.ceil(low_edge * dft_bins_count / sampling_freq)
    # NOTE(review): "/" is integer division under Python 2 (1025) but float
    # division under Python 3 (1025.0) -- confirm the target interpreter.
    high_edge_idx = dft_bins_count / 2 + 1
    length = high_edge_idx - low_edge_idx + 1
    generator = LogFreqScale(length, dft_bins_count, sampling_freq)
    out_data = generator.data
    print(utilities.PrintMetadata(utilities.GetMetadata(out_data)))
    pylab.plot(out_data, label = "out")
    pylab.legend()
    pylab.show()
|
from PyQt4 import QtGui, QtCore
import sys
import math
import numpy as np
from mpl_toolkits.axes_grid import make_axes_locatable, Size
from stamp.plugins.groups.AbstractGroupPlotPlugin import AbstractGroupPlotPlugin, TestWindow, ConfigureDialog
from stamp.plugins.groups.plots.configGUI.extendedErrorBarUI import Ui_ExtendedErrorBarDialog
from stamp.metagenomics import TableHelper
from matplotlib.patches import Rectangle
class ExtendedErrorBar(AbstractGroupPlotPlugin):
    '''
    Extended error bar plot.

    Python 2 / PyQt4 era code (``xrange``, ``dict.iteritems`` and the
    QVariant-returning QSettings API are used throughout).
    '''
    def __init__(self, preferences, parent=None):
        # Restore persisted plot options; PyQt4's QVariant API returns
        # (value, ok) tuples from toDouble()/toInt(), hence the [0].
        AbstractGroupPlotPlugin.__init__(self, preferences, parent)
        self.name = 'Extended error bar'
        self.type = 'Statistical'
        self.bSupportsHighlight = True
        self.bPlotFeaturesIndividually = False
        self.settings = preferences['Settings']
        self.figWidth = self.settings.value('group: ' + self.name + '/width', 7.0).toDouble()[0]
        self.figHeightPerRow = self.settings.value('group: ' + self.name + '/row height', 0.2).toDouble()[0]
        self.sortingField = self.settings.value('group: ' + self.name + '/field', 'p-values').toString()
        self.bShowBarPlot = self.settings.value('group: ' + self.name + '/sequences subplot', True).toBool()
        self.bShowPValueLabels = self.settings.value('group: ' + self.name + '/p-value labels', True).toBool()
        self.bShowCorrectedPvalues = self.settings.value('group: ' + self.name + '/show corrected p-values', True).toBool()
        self.bCustomLimits = self.settings.value('group: ' + self.name + '/use custom limits', False).toBool()
        self.minX = self.settings.value('group: ' + self.name + '/minimum', 0.0).toDouble()[0]
        self.maxX = self.settings.value('group: ' + self.name + '/maximum', 1.0).toDouble()[0]
        self.markerSize = self.settings.value('group: ' + self.name + '/marker size', 30).toInt()[0]
        self.bShowStdDev = self.settings.value('group: ' + self.name + '/show std. dev.', False).toBool()
        self.endCapSize = self.settings.value('group: ' + self.name + '/end cap size', 0.0).toInt()[0]
        self.legendPos = self.settings.value('group: ' + self.name + '/legend position', -1).toInt()[0]

    def mirrorProperties(self, plotToCopy):
        # Copy all display settings from another plot instance.
        self.name = plotToCopy.name
        self.figWidth = plotToCopy.figWidth
        self.figHeightPerRow = plotToCopy.figHeightPerRow
        self.sortingField = plotToCopy.sortingField
        self.bShowBarPlot = plotToCopy.bShowBarPlot
        self.bShowPValueLabels = plotToCopy.bShowPValueLabels
        self.bShowCorrectedPvalues = plotToCopy.bShowCorrectedPvalues
        self.bCustomLimits = plotToCopy.bCustomLimits
        self.minX = plotToCopy.minX
        self.maxX = plotToCopy.maxX
        self.markerSize = plotToCopy.markerSize
        self.bShowStdDev = plotToCopy.bShowStdDev
        self.endCapSize = plotToCopy.endCapSize
        self.legendPos = plotToCopy.legendPos

    def plot(self, profile, statsResults):
        """Render the extended error bar plot for the active features."""
        # *** Check if there is sufficient data to generate the plot
        if len(statsResults.activeData) <= 0:
            self.emptyAxis()
            return
        features = statsResults.getColumn('Features')
        if len(features) > 200:
            # Warn about potentially slow rendering and let the user bail.
            QtGui.QApplication.instance().setOverrideCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor))
            reply = QtGui.QMessageBox.question(self, 'Continue?', 'Profile contains ' + str(len(features)) + ' features. ' +
                        'It may take several seconds to generate this plot. We recommend filtering your profile first. ' +
                        'Do you wish to continue?', QtGui.QMessageBox.Yes, QtGui.QMessageBox.No)
            QtGui.QApplication.instance().restoreOverrideCursor()
            if reply == QtGui.QMessageBox.No:
                self.emptyAxis()
                return
        # *** Colour of plot elements
        axesColour = str(self.preferences['Axes colour'].name())
        group1Colour = str(self.preferences['Group colours'][profile.groupName1].name())
        group2Colour = str(self.preferences['Group colours'][profile.groupName2].name())
        # *** Colour of plot elements
        highlightColor = (0.9, 0.9, 0.9)
        # *** Sort data
        if self.sortingField == 'p-values':
            statsResults.activeData = TableHelper.SortTable(statsResults.activeData,
                [statsResults.dataHeadings['pValues']], False)
        elif self.sortingField == 'Effect sizes':
            statsResults.activeData = TableHelper.SortTable(statsResults.activeData,
                [statsResults.dataHeadings['EffectSize']],
                True, True, False)
        elif self.sortingField == 'Feature labels':
            statsResults.activeData = TableHelper.SortTableStrCol(statsResults.activeData,
                statsResults.dataHeadings['Features'], False)
        features = statsResults.getColumn('Features')  # get sorted feature labels
        # *** Create lists for each quantity of interest
        if statsResults.multCompCorrection.method == 'False discovery rate':
            pValueTitle = 'q-value'
        else:
            pValueTitle = 'p-value'
        if self.bShowCorrectedPvalues:
            pValueLabels = statsResults.getColumnAsStr('pValuesCorrected')
            if statsResults.multCompCorrection.method != 'No correction':
                pValueTitle += ' (corrected)'
        else:
            pValueLabels = statsResults.getColumnAsStr('pValues')
        effectSizes = statsResults.getColumn('EffectSize')
        lowerCIs = statsResults.getColumn('LowerCI')
        upperCIs = statsResults.getColumn('UpperCI')
        ciTitle = ('%.3g' % (statsResults.oneMinusAlpha()*100)) + '% confidence intervals'
        # *** Truncate feature labels
        highlightedFeatures = list(self.preferences['Highlighted group features'])
        if self.preferences['Truncate feature names']:
            length = self.preferences['Length of truncated feature names']
            for i in xrange(0, len(features)):
                if len(features[i]) > length+3:
                    features[i] = features[i][0:length] + '...'
            for i in xrange(0, len(highlightedFeatures)):
                if len(highlightedFeatures[i]) > length+3:
                    highlightedFeatures[i] = highlightedFeatures[i][0:length] + '...'
        # *** Check that there is at least one significant feature
        if len(features) <= 0:
            self.emptyAxis('No significant features')
            return
        # *** Adjust effect size for axis scale
        # Convert absolute CI bounds into error-bar half-lengths; record
        # which features are dominated by the second sample group.
        dominateInSample2 = []
        percentage1 = []
        percentage2 = []
        for i in xrange(0, len(effectSizes)):
            if statsResults.bConfIntervRatio:
                if effectSizes[i] < 1:
                    # mirror CI across y-axis
                    effectSizes[i] = 1.0 / effectSizes[i]
                    lowerCI = effectSizes[i] - (1.0 / upperCIs[i])
                    upperCI = (1.0 / lowerCIs[i]) - effectSizes[i]
                    lowerCIs[i] = lowerCI
                    upperCIs[i] = upperCI
                    dominateInSample2.append(i)
                else:
                    lowerCIs[i] = effectSizes[i] - lowerCIs[i]
                    upperCIs[i] = upperCIs[i] - effectSizes[i]
            else:
                lowerCIs[i] = effectSizes[i] - lowerCIs[i]
                upperCIs[i] = upperCIs[i] - effectSizes[i]
                if effectSizes[i] < 0.0:
                    dominateInSample2.append(i)
        # *** Set figure size
        if self.legendPos == 3 or self.legendPos == 4 or self.legendPos == 8:  # bottom legend
            heightBottomLabels = 0.56  # inches
        else:
            heightBottomLabels = 0.4  # inches
        heightTopLabels = 0.25
        plotHeight = self.figHeightPerRow*len(features)
        self.imageWidth = self.figWidth
        self.imageHeight = plotHeight + heightBottomLabels + heightTopLabels
        if self.imageWidth > 256 or self.imageHeight > 256:
            QtGui.QApplication.instance().setOverrideCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor))
            self.emptyAxis()
            reply = QtGui.QMessageBox.question(self, 'Excessively large plot', 'The resulting plot is too large to display.')
            QtGui.QApplication.instance().restoreOverrideCursor()
            return
        self.fig.set_size_inches(self.imageWidth, self.imageHeight)
        # *** Determine width of y-axis labels
        yLabelBounds = self.yLabelExtents(features, 8)
        # *** Size plots which comprise the extended errorbar plot
        self.fig.clear()
        spacingBetweenPlots = 0.25  # inches
        widthNumSeqPlot = 1.25  # inches
        if self.bShowBarPlot == False:
            widthNumSeqPlot = 0.0
            spacingBetweenPlots = 0.0
        widthPvalueLabels = 0.75  # inches
        if self.bShowPValueLabels == False:
            widthPvalueLabels = 0.1
        yPlotOffsetFigSpace = heightBottomLabels / self.imageHeight
        heightPlotFigSpace = plotHeight / self.imageHeight
        # NOTE(review): precedence here adds 0.1/imageWidth (inches in
        # figure space) to the label width -- presumably intentional
        # padding, but confirm it is not meant to be
        # (width + 0.1) / imageWidth.
        xPlotOffsetFigSpace = yLabelBounds.width + 0.1 / self.imageWidth
        pValueLabelWidthFigSpace = widthPvalueLabels / self.imageWidth
        widthPlotFigSpace = 1.0 - pValueLabelWidthFigSpace - xPlotOffsetFigSpace
        widthErrorBarPlot = widthPlotFigSpace*self.imageWidth - widthNumSeqPlot - spacingBetweenPlots
        axInitAxis = self.fig.add_axes([xPlotOffsetFigSpace,yPlotOffsetFigSpace,widthPlotFigSpace,heightPlotFigSpace])
        divider = make_axes_locatable(axInitAxis)
        divider.get_vertical()[0] = Size.Fixed(len(features)*self.figHeightPerRow)
        if self.bShowBarPlot == True:
            divider.get_horizontal()[0] = Size.Fixed(widthNumSeqPlot)
            axErrorbar = divider.new_horizontal(widthErrorBarPlot, pad=spacingBetweenPlots, sharey=axInitAxis)
            self.fig.add_axes(axErrorbar)
        else:
            divider.get_horizontal()[0] = Size.Fixed(widthErrorBarPlot)
            axErrorbar = axInitAxis
        # *** Plot of sequences for each subsystem
        if self.bShowBarPlot == True:
            axNumSeq = axInitAxis
            meanRelFreqSeqs1 = statsResults.getColumn('MeanRelFreq1')
            meanRelFreqSeqs2 = statsResults.getColumn('MeanRelFreq2')
            if self.bShowStdDev:
                stdDev1 = statsResults.getColumn('StdDevRelFreq1')
                stdDev2 = statsResults.getColumn('StdDevRelFreq2')
                endCapSize = self.endCapSize
            else:
                stdDev1 = [0] * len(meanRelFreqSeqs1)
                stdDev2 = [0] * len(meanRelFreqSeqs2)
                endCapSize = 0
            axNumSeq.barh(np.arange(len(features))+0.0, meanRelFreqSeqs1, height = 0.3, xerr=stdDev1, color=group1Colour, ecolor='black', capsize=endCapSize)
            axNumSeq.barh(np.arange(len(features))-0.3, meanRelFreqSeqs2, height = 0.3, xerr=stdDev2, color=group2Colour, ecolor='black', capsize=endCapSize)
            # Shade every other feature row for readability.
            for value in np.arange(-0.5, len(features)-1, 2):
                axNumSeq.axhspan(value, value+1, facecolor=highlightColor,edgecolor='none',zorder=-1)
            axNumSeq.set_xlabel('Mean proportion (%)')
            maxPercentage = max(max(meanRelFreqSeqs1), max(meanRelFreqSeqs2))
            axNumSeq.set_xticks([0, maxPercentage])
            axNumSeq.set_xlim([0, maxPercentage*1.05])
            maxPercentageStr = '%.1f' % maxPercentage
            axNumSeq.set_xticklabels(['0.0', maxPercentageStr])
            axNumSeq.set_yticks(np.arange(len(features)))
            axNumSeq.set_yticklabels(features)
            axNumSeq.set_ylim([-1, len(features)])
            for label in axNumSeq.get_yticklabels():
                if label.get_text() in highlightedFeatures:
                    label.set_color('red')
            for a in axNumSeq.yaxis.majorTicks:
                a.tick1On=False
                a.tick2On=False
            for a in axNumSeq.xaxis.majorTicks:
                a.tick1On=True
                a.tick2On=False
            for line in axNumSeq.yaxis.get_ticklines():
                line.set_color(axesColour)
            for line in axNumSeq.xaxis.get_ticklines():
                line.set_color(axesColour)
            for loc, spine in axNumSeq.spines.iteritems():
                if loc in ['left', 'right','top']:
                    spine.set_color('none')
                else:
                    spine.set_color(axesColour)
        # *** Plot confidence intervals for each subsystem
        lastAxes = axErrorbar
        markerSize = math.sqrt(float(self.markerSize))
        axErrorbar.errorbar(effectSizes, np.arange(len(features)), xerr=[lowerCIs,upperCIs], fmt='o', ms=markerSize, mfc=group1Colour, mec='black', ecolor='black', zorder=10)
        # Re-draw markers for features dominated by group 2 in its colour.
        effectSizesSample2 = [effectSizes[value] for value in dominateInSample2]
        axErrorbar.plot(effectSizesSample2, dominateInSample2, ls='', marker='o', ms=markerSize, mfc=group2Colour, mec='black', zorder=100)
        # Reference line at the null effect (1 for ratios, 0 otherwise).
        if statsResults.bConfIntervRatio:
            axErrorbar.vlines(1, -1, len(features), linestyle='dashed', color=axesColour)
        else:
            axErrorbar.vlines(0, -1, len(features), linestyle='dashed', color=axesColour)
        for value in np.arange(-0.5, len(features)-1, 2):
            axErrorbar.axhspan(value, value+1, facecolor=highlightColor,edgecolor='none',zorder=1)
        axErrorbar.set_title(ciTitle)
        axErrorbar.set_xlabel('Difference in mean proportions (%)')
        if self.bCustomLimits:
            axErrorbar.set_xlim([self.minX, self.maxX])
        else:
            self.minX, self.maxX = axErrorbar.get_xlim()
        if self.bShowBarPlot == False:
            # No bar subplot: the error bar axis carries the feature labels.
            axErrorbar.set_yticks(np.arange(len(features)))
            axErrorbar.set_yticklabels(features)
            axErrorbar.set_ylim([-1, len(features)])
            for label in axErrorbar.get_yticklabels():
                if label.get_text() in self.preferences['Highlighted group features']:
                    label.set_color('red')
        else:
            for label in axErrorbar.get_yticklabels():
                label.set_visible(False)
            for a in axErrorbar.yaxis.majorTicks:
                a.set_visible(False)
        for a in axErrorbar.xaxis.majorTicks:
            a.tick1On=True
            a.tick2On=False
        for a in axErrorbar.yaxis.majorTicks:
            a.tick1On=False
            a.tick2On=False
        for line in axErrorbar.yaxis.get_ticklines():
            line.set_visible(False)
        for line in axErrorbar.xaxis.get_ticklines():
            line.set_color(axesColour)
        for loc, spine in axErrorbar.spines.iteritems():
            if loc in ['left','right','top']:
                spine.set_color('none')
            else:
                spine.set_color(axesColour)
        # *** Show p-values on right of last plot
        if self.bShowPValueLabels == True:
            axRight = lastAxes.twinx()
            axRight.set_yticks(np.arange(len(pValueLabels)))
            axRight.set_yticklabels(pValueLabels)
            axRight.set_ylim([-1, len(pValueLabels)])
            axRight.set_ylabel(pValueTitle)
            for a in axRight.yaxis.majorTicks:
                a.tick1On=False
                a.tick2On=False
            for loc, spine in axRight.spines.iteritems():
                spine.set_color('none')
        # *** Legend
        if self.legendPos != -1:
            legend1 = Rectangle((0, 0), 1, 1, fc=group1Colour)
            legend2 = Rectangle((0, 0), 1, 1, fc=group2Colour)
            legend = self.fig.legend([legend1, legend2], (profile.groupName1, profile.groupName2), loc=self.legendPos, ncol=2)
            legend.get_frame().set_linewidth(0)
        self.updateGeometry()
        self.draw()

    def configure(self, profile, statsResults):
        """Show the configuration dialog and re-plot on acceptance."""
        self.statsResults = statsResults
        self.configDlg = ConfigureDialog(Ui_ExtendedErrorBarDialog)
        # set enabled state of controls
        # NOTE(review): the next two calls pass self.bShowBarPlot to
        # setChecked()/setValue(); given the comment above they were likely
        # meant to enable/disable the std.-dev. controls (e.g.
        # setEnabled(self.bShowStdDev)).  Both widgets are re-populated with
        # the correct values below -- confirm intent before changing.
        self.configDlg.ui.chkShowStdDev.setChecked(self.bShowBarPlot)
        self.configDlg.ui.spinEndCapSize.setValue(self.bShowBarPlot)
        self.configDlg.ui.spinMinimumX.setEnabled(self.bCustomLimits)
        self.configDlg.ui.spinMaximumX.setEnabled(self.bCustomLimits)
        # set current value of controls
        self.configDlg.ui.cboSortingField.setCurrentIndex(self.configDlg.ui.cboSortingField.findText(self.sortingField))
        self.configDlg.ui.spinFigWidth.setValue(self.figWidth)
        self.configDlg.ui.spinFigRowHeight.setValue(self.figHeightPerRow)
        self.configDlg.ui.chkShowBarPlot.setChecked(self.bShowBarPlot)
        self.configDlg.ui.chkPValueLabels.setChecked(self.bShowPValueLabels)
        self.configDlg.ui.chkCorrectedPvalues.setChecked(self.bShowCorrectedPvalues)
        self.configDlg.ui.chkCustomLimits.setChecked(self.bCustomLimits)
        self.configDlg.ui.spinMinimumX.setValue(self.minX)
        self.configDlg.ui.spinMaximumX.setValue(self.maxX)
        self.configDlg.ui.spinMarkerSize.setValue(self.markerSize)
        self.configDlg.ui.chkShowStdDev.setChecked(self.bShowStdDev)
        self.configDlg.ui.spinEndCapSize.setValue(self.endCapSize)
        # Map the stored matplotlib legend location code to a radio button.
        if self.legendPos == 2:
            self.configDlg.ui.radioLegendPosUpperLeft.setChecked(True)
        elif self.legendPos == 3:
            self.configDlg.ui.radioLegendPosLowerLeft.setChecked(True)
        elif self.legendPos == 4:
            self.configDlg.ui.radioLegendPosLowerRight.setChecked(True)
        elif self.legendPos == 8:
            self.configDlg.ui.radioLegendPosLowerCentre.setChecked(True)
        else:
            self.configDlg.ui.radioLegendPosNone.setChecked(True)
        if self.configDlg.exec_() == QtGui.QDialog.Accepted:
            QtGui.QApplication.instance().setOverrideCursor(QtGui.QCursor(QtCore.Qt.WaitCursor))
            self.sortingField = str(self.configDlg.ui.cboSortingField.currentText())
            self.figWidth = self.configDlg.ui.spinFigWidth.value()
            self.figHeightPerRow = self.configDlg.ui.spinFigRowHeight.value()
            self.bShowBarPlot = self.configDlg.ui.chkShowBarPlot.isChecked()
            self.bShowPValueLabels = self.configDlg.ui.chkPValueLabels.isChecked()
            self.bShowCorrectedPvalues = self.configDlg.ui.chkCorrectedPvalues.isChecked()
            self.bCustomLimits = self.configDlg.ui.chkCustomLimits.isChecked()
            self.minX = self.configDlg.ui.spinMinimumX.value()
            self.maxX = self.configDlg.ui.spinMaximumX.value()
            self.markerSize = self.configDlg.ui.spinMarkerSize.value()
            self.bShowStdDev = self.configDlg.ui.chkShowStdDev.isChecked()
            self.endCapSize = self.configDlg.ui.spinEndCapSize.value()
            # legend position
            if self.configDlg.ui.radioLegendPosUpperLeft.isChecked() == True:
                self.legendPos = 2
            elif self.configDlg.ui.radioLegendPosLowerLeft.isChecked() == True:
                self.legendPos = 3
            elif self.configDlg.ui.radioLegendPosLowerCentre.isChecked() == True:
                self.legendPos = 8
            elif self.configDlg.ui.radioLegendPosLowerRight.isChecked() == True:
                self.legendPos = 4
            else:
                self.legendPos = -1
            # Persist the updated settings.
            self.settings.setValue('group: ' + self.name + '/width', self.figWidth)
            self.settings.setValue('group: ' + self.name + '/row height', self.figHeightPerRow)
            self.settings.setValue('group: ' + self.name + '/field', self.sortingField)
            self.settings.setValue('group: ' + self.name + '/sequences subplot', self.bShowBarPlot)
            self.settings.setValue('group: ' + self.name + '/p-value labels', self.bShowPValueLabels)
            self.settings.setValue('group: ' + self.name + '/show corrected p-values', self.bShowCorrectedPvalues)
            self.settings.setValue('group: ' + self.name + '/use custom limits', self.bCustomLimits)
            self.settings.setValue('group: ' + self.name + '/minimum', self.minX)
            self.settings.setValue('group: ' + self.name + '/maximum', self.maxX)
            self.settings.setValue('group: ' + self.name + '/marker size', self.markerSize)
            self.settings.setValue('group: ' + self.name + '/show std. dev.', self.bShowStdDev)
            self.settings.setValue('group: ' + self.name + '/end cap size', self.endCapSize)
            self.settings.setValue('group: ' + self.name + '/legend position', self.legendPos)
            self.plot(profile, statsResults)
            QtGui.QApplication.instance().restoreOverrideCursor()
if __name__ == "__main__":
    # Manual test harness: show the plot plugin in a standalone window.
    app = QtGui.QApplication(sys.argv)
    testWindow = TestWindow(ExtendedErrorBar)
    testWindow.show()
    sys.exit(app.exec_())
|
"""autogenerated by genpy from loadcell_calibration/GetFactorRequest.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
class GetFactorRequest(genpy.Message):
  # Autogenerated ROS service request (two little-endian int64 fields).
  _md5sum = "36d09b846be0b371c5f190354dd3153e"
  _type = "loadcell_calibration/GetFactorRequest"
  _has_header = False #flag to mark the presence of a Header object
  _full_text = """int64 a
int64 b
"""
  __slots__ = ['a','b']
  _slot_types = ['int64','int64']
  def __init__(self, *args, **kwds):
    """
    Constructor. Any message fields that are implicitly/explicitly
    set to None will be assigned a default value. The recommend
    use is keyword arguments as this is more robust to future message
    changes.  You cannot mix in-order arguments and keyword arguments.
    The available fields are:
       a,b
    :param args: complete set of field values, in .msg order
    :param kwds: use keyword arguments corresponding to message field names
    to set specific fields.
    """
    if args or kwds:
      super(GetFactorRequest, self).__init__(*args, **kwds)
      #message fields cannot be None, assign default values for those that are
      if self.a is None:
        self.a = 0
      if self.b is None:
        self.b = 0
    else:
      self.a = 0
      self.b = 0
  def _get_types(self):
    """
    internal API method
    """
    return self._slot_types
  def serialize(self, buff):
    """
    serialize message into buffer
    :param buff: buffer, ``StringIO``
    """
    try:
      _x = self
      buff.write(_struct_2q.pack(_x.a, _x.b))
    except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(_x))))
    except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(_x))))
  def deserialize(self, str):
    """
    unpack serialized message in str into this message instance
    :param str: byte array of serialized message, ``str``
    """
    try:
      end = 0
      _x = self
      start = end
      end += 16
      (_x.a, _x.b,) = _struct_2q.unpack(str[start:end])
      return self
    except struct.error as e:
      raise genpy.DeserializationError(e) #most likely buffer underfill
  def serialize_numpy(self, buff, numpy):
    """
    serialize message with numpy array types into buffer
    :param buff: buffer, ``StringIO``
    :param numpy: numpy python module
    """
    try:
      _x = self
      buff.write(_struct_2q.pack(_x.a, _x.b))
    except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(_x))))
    except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(_x))))
  def deserialize_numpy(self, str, numpy):
    """
    unpack serialized message in str into this message instance using numpy for array types
    :param str: byte array of serialized message, ``str``
    :param numpy: numpy python module
    """
    try:
      end = 0
      _x = self
      start = end
      end += 16
      (_x.a, _x.b,) = _struct_2q.unpack(str[start:end])
      return self
    except struct.error as e:
      raise genpy.DeserializationError(e) #most likely buffer underfill
# Pre-compiled struct formats: generic uint32 helper and the request's
# two little-endian int64 fields.
_struct_I = genpy.struct_I
_struct_2q = struct.Struct("<2q")
"""autogenerated by genpy from loadcell_calibration/GetFactorResponse.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
class GetFactorResponse(genpy.Message):
  # Autogenerated ROS service response (single little-endian int64).
  _md5sum = "b88405221c77b1878a3cbbfff53428d7"
  _type = "loadcell_calibration/GetFactorResponse"
  _has_header = False #flag to mark the presence of a Header object
  _full_text = """int64 sum
"""
  __slots__ = ['sum']
  _slot_types = ['int64']
  def __init__(self, *args, **kwds):
    """
    Constructor. Any message fields that are implicitly/explicitly
    set to None will be assigned a default value. The recommend
    use is keyword arguments as this is more robust to future message
    changes.  You cannot mix in-order arguments and keyword arguments.
    The available fields are:
       sum
    :param args: complete set of field values, in .msg order
    :param kwds: use keyword arguments corresponding to message field names
    to set specific fields.
    """
    if args or kwds:
      super(GetFactorResponse, self).__init__(*args, **kwds)
      #message fields cannot be None, assign default values for those that are
      if self.sum is None:
        self.sum = 0
    else:
      self.sum = 0
  def _get_types(self):
    """
    internal API method
    """
    return self._slot_types
  def serialize(self, buff):
    """
    serialize message into buffer
    :param buff: buffer, ``StringIO``
    """
    try:
      buff.write(_struct_q.pack(self.sum))
    # NOTE(review): `_x` is never assigned in this method, so these
    # handlers would raise NameError instead of reporting the original
    # error; upstream genpy guards with locals().get('_x', self).
    # Left untouched because the file is autogenerated.
    except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(_x))))
    except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(_x))))
  def deserialize(self, str):
    """
    unpack serialized message in str into this message instance
    :param str: byte array of serialized message, ``str``
    """
    try:
      end = 0
      start = end
      end += 8
      (self.sum,) = _struct_q.unpack(str[start:end])
      return self
    except struct.error as e:
      raise genpy.DeserializationError(e) #most likely buffer underfill
  def serialize_numpy(self, buff, numpy):
    """
    serialize message with numpy array types into buffer
    :param buff: buffer, ``StringIO``
    :param numpy: numpy python module
    """
    try:
      buff.write(_struct_q.pack(self.sum))
    # NOTE(review): same undefined `_x` issue as in serialize() above.
    except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(_x))))
    except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(_x))))
  def deserialize_numpy(self, str, numpy):
    """
    unpack serialized message in str into this message instance using numpy for array types
    :param str: byte array of serialized message, ``str``
    :param numpy: numpy python module
    """
    try:
      end = 0
      start = end
      end += 8
      (self.sum,) = _struct_q.unpack(str[start:end])
      return self
    except struct.error as e:
      raise genpy.DeserializationError(e) #most likely buffer underfill
# Precompiled struct packers used by GetFactorResponse (de)serialization:
# "<q" is a single little-endian int64 (the `sum` field).
_struct_I = genpy.struct_I
_struct_q = struct.Struct("<q")
class GetFactor(object):
    # Service metadata container tying together the request/response message
    # classes for the loadcell_calibration/GetFactor ROS service.
    _type = 'loadcell_calibration/GetFactor'
    _md5sum = '6a2e34150c00229791cc89ff309fff21'
    _request_class  = GetFactorRequest
    _response_class = GetFactorResponse
|
# Layer names and paired ArcGIS definition-query (QD) strings.  The literal
# token *MapNumber* is a placeholder that is substituted with the current
# map number before the query is applied.

# Annotation layers (per-feature-class).
LOTSANNO_LAYER = "LotsAnno"
# BUG FIX: restored the missing space before OR (was '*MapNumber*'OR),
# matching every other definition query in this file.
LOTSANNO_QD = "\"MapNumber\" = '*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
PLATSANNO_LAYER = "PlatsAnno"
PLATSANNO_QD = "\"MapNumber\" = '*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
TAXCODEANNO_LAYER = "TaxCodeAnno"
TAXCODEANNO_QD = "\"MapNumber\" = '*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
TAXNUMANNO_LAYER = "TaxlotNumberAnno"
TAXNUMANNO_QD = "\"MapNumber\" = '*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
ACRESANNO_LAYER = "TaxlotAcresAnno"
ACRESANNO_QD = "\"MapNumber\" = '*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"

# Scale-dependent annotation layers.
ANNO10_LAYER = "Anno0010scale"
ANNO10_QD = "\"MapNumber\" = '*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
ANNO20_LAYER = "Anno0020scale"
ANNO20_QD = "\"MapNumber\" = '*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
ANNO30_LAYER = "Anno0030scale"
ANNO30_QD = "\"MapNumber\" = '*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
ANNO40_LAYER = "Anno0040scale"
ANNO40_QD = "\"MapNumber\" = '*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
ANNO50_LAYER = "Anno0050scale"
ANNO50_QD = "\"MapNumber\" = '*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
ANNO60_LAYER = "Anno0060scale"
ANNO60_QD = "\"MapNumber\" = '*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
ANNO100_LAYER = "Anno0100scale"
ANNO100_QD = "\"MapNumber\" = '*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
ANNO200_LAYER = "Anno0200scale"
ANNO200_QD = "\"MapNumber\" = '*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
ANNO400_LAYER = "Anno0400scale"
ANNO400_QD = "\"MapNumber\" = '*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
ANNO800_LAYER = "Anno0800scale"
ANNO800_QD = "\"MapNumber\" = '*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
ANNO2000_LAYER = "Anno2000scale"
ANNO2000_QD = "\"MapNumber\"='*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"

# Feature layers drawn above the base map.
CORNER_ABOVE_LAYER = "Corner"
CORNER_ABOVE_QD = "\"MapNumber\"='*MapNumber*'"
TAXCODELINES_ABOVE_LAYER = "TaxCodeLines - Above"
TAXCODELINES_ABOVE_QD = ""
TAXLOTLINES_ABOVE_LAYER = "TaxlotLines - Above"
TAXLOTLINES_ABOVE_QD = "\"LineType\" <> 32"
REFLINES_ABOVE_LAYER = "ReferenceLines - Above"
REFLINES_ABOVE_QD = "\"MAPNUMBER\" = '*MapNumber*'"
CARTOLINES_ABOVE_LAYER = "CartographicLines - Above"
CARTOLINES_ABOVE_QD = ""
WATERLINES_ABOVE_LAYER = "WaterLines - Above"
WATERLINES_ABOVE_QD = ""
WATER_ABOVE_LAYER = "Water - Above"
WATER_ABOVE_QD = ""

# Map index / "see map" layers.
MAPINDEXSEEMAP_LAYER = ""
MAPINDEXSEEMAP_QD = ""
MAPINDEX_LAYER = "SeeMaps"
MAPINDEX_QD = "\"IndexMap\" = '*MapNumber*'"

# Feature layers drawn below the base map.
CORNER_BELOW_LAYER = "Corner - Below"
CORNER_BELOW_QD = ""
TAXCODELINES_BELOW_LAYER = "TaxCodeLines - Below"
TAXCODELINES_BELOW_QD = ""
TAXLOTLINES_BELOW_LAYER = "TaxlotLines - Below"
TAXLOTLINES_BELOW_QD = ""
REFLINES_BELOW_LAYER = "ReferenceLines - Below"
REFLINES_BELOW_QD = ""
CARTOLINES_BELOW_LAYER = "CartographicLines - Below"
CARTOLINES_BELOW_QD = ""
WATERLINES_BELOW_LAYER = "WaterLines - Below"
WATERLINES_BELOW_QD = ""
WATER_BELOW_LAYER = "Water - Below"
WATER_BELOW_QD = ""

# Database tables.
PAGELAYOUT_TABLE = "giscarto.CREATOR_ASR.PAGELAYOUTELEMENTS"
CANCELLEDNUMBERS_TABLE = "giscarto.CREATOR_ASR.CANCELLEDNUMBERS"
CUSTOMDEFINITIONQUERIES_TABLE = "CustomDefinitionQueries"

# Extra/auxiliary layer slots (arrows, railroads, landmarks, ...).
EXTRA1_LAYER = "Arrow0010scale"
EXTRA1_QD = "\"MapNumber\"='*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
EXTRA2_LAYER = "Arrow0020scale"
EXTRA2_QD = "\"MapNumber\"='*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
EXTRA3_LAYER = "Arrow0030scale"
EXTRA3_QD = "\"MapNumber\"='*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
EXTRA4_LAYER = "Arrow0040scale"
EXTRA4_QD = "\"MapNumber\"='*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
EXTRA5_LAYER = "Arrow0050scale"
EXTRA5_QD = "\"MapNumber\"='*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
EXTRA6_LAYER = "Arrow0100scale"
EXTRA6_QD = "\"MapNumber\"='*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
EXTRA7_LAYER = "Arrow0200scale"
EXTRA7_QD = "\"MapNumber\"='*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
EXTRA8_LAYER = "Arrow0400scale"
EXTRA8_QD = "\"MapNumber\"='*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
EXTRA9_LAYER = "Arrow2000scale"
EXTRA9_QD = "\"MapNumber\"='*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
EXTRA10_LAYER = "MapSecLines - Below"
EXTRA10_QD = "\"MapNumber\"='*MapNumber*'"
EXTRA11_LAYER = "Railroad"
EXTRA11_QD = "CL <> 'Y'"
EXTRA12_LAYER = "MapArea"
EXTRA12_QD = "\"MapNumber\"='*MapNumber*'"
EXTRA13_LAYER = ""
EXTRA13_QD = ""
EXTRA14_LAYER = "Taxlots - Above"
EXTRA14_QD = "\"MapNumber\"='*MapNumber*'"
EXTRA15_LAYER = "Arrow0060scale"
EXTRA15_QD = "\"MapNumber\"='*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
EXTRA16_LAYER = "Landmarks"
EXTRA16_QD = "\"MapNumber\"='*MapNumber*' OR \"MapNumber\" is NULL OR \"MapNumber\" = ''"
EXTRA17_LAYER = ""
EXTRA17_QD = ""
EXTRA18_LAYER = ""
EXTRA18_QD = ""
EXTRA19_LAYER = ""
EXTRA19_QD = ""
EXTRA20_LAYER = ""
EXTRA20_QD = ""
|
'''
Rigidity is a simple wrapper to the built-in csv module that allows for
validation and correction of data being read/written from/to CSV files.
This module allows you to easily construct validation and correction
rulesets to be applied automatically while preserving the csv interface.
This allows you to easily upgrade old software to use new, strict rules.
'''
import rigidity.errors
import rigidity.rules as rules
class Rigidity():
    '''
    A wrapper for CSV readers and writers that applies per-column
    validation and correction rules while preserving the csv interface.
    '''
    csvobj = None  # Declare here to prevent getattr/setattr recursion

    #: Do not display output at all.
    DISPLAY_NONE = 0
    #: Display simple warnings when ValueError is raised by a rule.
    DISPLAY_SIMPLE = 1

    def __init__(self, csvobj, rules=None, display=DISPLAY_NONE):
        '''
        :param csvfile: a Reader or Writer object from the csv module;
          any calls to this object's methods will be wrapped to perform
          the specified rigidity checks.
        :param rules: a two dimensional list (or a dict keyed by column)
          containing rules to be applied to columns moving in/out of
          `csvobj`. The row indices in this list match the column in the
          CSV file the list of rules will be applied to. Defaults to no
          rules.
        :param int display: When an error is thrown, display the row
          and information about which column caused the error.
        '''
        # BUG FIX: the default was the mutable `rules=[]`, shared between
        # all instances constructed without an explicit rules argument.
        if rules is None:
            rules = []
        self.csvobj = csvobj
        self.rules = rules
        self.display = display
        # `keys` is iterated by the validate_* methods: dict keys for
        # dict-style rules, column indices for list-style rules.
        if isinstance(rules, dict):
            self.keys = rules.keys()
        else:
            self.keys = range(0, len(rules))

    # Wrapper methods for the `csv` interface

    def writeheader(self):
        '''
        Plain pass-through to the given CSV object. It is assumed that
        header information is already valid when the CSV object is
        constructed.
        '''
        self.csvobj.writeheader()

    def writerow(self, row):
        '''
        Validate and correct the data provided in `row` and raise an
        exception if the validation or correction fails. Then, write the
        row to the CSV file. Rows dropped by a rule (DropRow) are
        silently skipped.
        '''
        try:
            self.csvobj.writerow(self.validate_write(row))
        except rigidity.errors.DropRow:
            return

    def writerows(self, rows):
        '''
        Validate and correct the data provided in every row and raise an
        exception if the validation or correction fails.

        .. note::
            Behavior in the case that the data is invalid and cannot be
            repaired is undefined. For example, the implementation may
            choose to write all valid rows up until the error, or it may
            choose to only conduct the write operation after all rows have
            been verified. Do not depend on the presence or absence of any
            of the rows in `rows` in the event that an exception occurs.
        '''
        for row in rows:
            self.writerow(row)

    # New methods, not part of the `csv` interface

    def validate(self, row):
        '''
        .. warning::
            This method is deprecated and will be removed in a future
            release; it is included only to support old code. It will
            not produce consistent results with bi-directional rules.
            You should use :meth:`validate_read` or
            :meth:`validate_write` instead.

        Validate that the row conforms with the specified rules,
        correcting invalid rows where the rule is able to do so.

        If the row is valid or can be made valid through corrections,
        this method will return a row that can be written to the CSV
        file. If the row is invalid and cannot be corrected, then this
        method will raise an exception.

        :param row: a row object that can be passed to a CSVWriter's
          writerow() method.
        '''
        # Ensure mutability - I'm looking at you, tuples!
        if not isinstance(row, (list, dict)):
            row = list(row)

        # Iterate through all keys, updating the data
        for key in self.keys:
            value = row[key]
            for rule in self.rules[key]:
                if hasattr(rule, 'apply'):
                    # legacy single-direction rule interface
                    value = rule.apply(value)
                else:
                    # BUG FIX: this previously did `return rule.read(value)`,
                    # aborting validation and returning a bare cell value
                    # instead of the documented row.
                    value = rule.read(value)
            row[key] = value

        # Return the updated data
        return row

    def validate_write(self, row):
        '''
        Validate that the row conforms with the specified rules,
        correcting invalid rows where the rule is able to do so.

        If the row is valid or can be made valid through corrections,
        this method will return a row that can be written to the CSV
        file. If the row is invalid and cannot be corrected, then this
        method will raise an exception.

        :param row: a row object that can be passed to a CSVWriter's
          __next__() method.
        '''
        # Ensure mutability - I'm looking at you, tuples!
        if not isinstance(row, (list, dict)):
            row = list(row)

        # Iterate through all keys, updating the data
        for key in self.keys:
            value = row[key]
            for rule in self.rules[key]:
                try:
                    value = rule.write(value)
                except ValueError as err:
                    # optionally report which column/rule rejected the row
                    if self.display == self.DISPLAY_SIMPLE:
                        print('Invalid data encountered in column %s:' % key)
                        print('  -', row)
                        print('  - Error raised by rule:', rule)
                        print('')
                    raise err
            row[key] = value

        # Return the updated data
        return row

    def validate_read(self, row):
        '''
        Validate that the row conforms with the specified rules,
        correcting invalid rows where the rule is able to do so.

        If the row is valid or can be made valid through corrections,
        this method will return a row that can be written to the CSV
        file. If the row is invalid and cannot be corrected, then this
        method will raise an exception.

        :param row: a row object that can be returned from CSVReader's
          readrow() method.
        '''
        # Ensure mutability - I'm looking at you, tuples!
        if not isinstance(row, (list, dict)):
            row = list(row)

        # Iterate through all keys, updating the data
        for key in self.keys:
            value = row[key]
            for rule in self.rules[key]:
                try:
                    value = rule.read(value)
                except ValueError as err:
                    if self.display == self.DISPLAY_SIMPLE:
                        print('Invalid data encountered in column %s:' % key)
                        print('  -', row)
                        print('  - Error raised by rule:', rule)
                        print('')
                    raise err
                except IndexError as err:
                    if self.display == self.DISPLAY_SIMPLE:
                        print('IndexError raised in column %s:' % key)
                        print('  -', row)
                        print('  - Error raised by rule:', rule)
                        print('')
                    raise err
            row[key] = value

        # Return the updated data
        return row

    def skip(self):
        '''
        Return a row, skipping validation. This is useful when you want
        to skip validation of header information.
        '''
        return next(self.csvobj)

    def __iter__(self):
        # Validate each row as it is read; rows a rule drops are skipped.
        for row in iter(self.csvobj):
            try:
                yield self.validate_read(row)
            except rigidity.errors.DropRow:
                continue

    def __next__(self):
        '''
        Call the __next__() method on the given CSV object, validate and
        repair the row it returns, raise an exception if the row cannot
        be repaired, and then return the row.
        '''
        try:
            return self.validate_read(next(self.csvobj))
        except rigidity.errors.DropRow:
            return next(self)

    def __getattr__(self, name):
        # Only called when normal attribute lookup fails; delegate to the
        # wrapped csv object when it has the attribute.
        if hasattr(self.csvobj, name):
            return getattr(self.csvobj, name)
        # BUG FIX: previously called the nonexistent
        # super().__getattr__(self, name), which raised a misleading
        # AttributeError about 'super'. Raise the conventional error.
        raise AttributeError(name)

    def __setattr__(self, name, value):
        # Attributes the wrapped csv object already has are forwarded to it;
        # everything else is stored on this wrapper.
        if hasattr(self.csvobj, name):
            return setattr(self.csvobj, name, value)
        super().__setattr__(name, value)

    def __delattr__(self, name):
        if hasattr(self.csvobj, name):
            return delattr(self.csvobj, name)
        return super().__delattr__(name)
|
'''enable run-time addition and removal of master link, just like --master on the cnd line'''
''' TO USE:
link add 10.11.12.13:14550
link list
link remove 3 # to remove 3rd output
'''
from pymavlink import mavutil
import time, struct, math, sys, fnmatch, traceback, json, os
from MAVProxy.modules.lib import mp_module
from MAVProxy.modules.lib import mp_util
if mp_util.has_wxpython:
from MAVProxy.modules.lib.mp_menu import *
from MAVProxy.modules.lib.wx_addlink import MPMenulinkAddDialog
# Message-type groupings referenced by the link-handling code in this module.
dataPackets = frozenset(['BAD_DATA','LOG_DATA'])
# NOTE(review): grouping names suggest delayedPackets are the types tolerated
# on a delayed link and activityPackets indicate link liveness -- the code
# that consumes these sets is not visible here; confirm against usage.
delayedPackets = frozenset([ 'MISSION_CURRENT', 'SYS_STATUS', 'VFR_HUD',
                             'GPS_RAW_INT', 'SCALED_PRESSURE', 'GLOBAL_POSITION_INT',
                             'NAV_CONTROLLER_OUTPUT' ])
activityPackets = frozenset([ 'HEARTBEAT', 'GPS_RAW_INT', 'GPS_RAW', 'GLOBAL_POSITION_INT', 'SYS_STATUS' ])
radioStatusPackets = frozenset([ 'RADIO', 'RADIO_STATUS'])
# Glob patterns of known autopilot USB/serial descriptors, used to rank
# auto-detected serial ports (passed to mavutil.auto_detect_serial).
preferred_ports = [
    '*FTDI*',
    "*Arduino_Mega_2560*",
    "*3D*",
    "*USB_to_UART*",
    '*Ardu*',
    '*PX4*',
    '*Hex_*',
    '*Holybro_*',
    '*mRo*',
    '*FMU*',
    '*Swift-Flyer*',
]
class LinkModule(mp_module.MPModule):
def __init__(self, mpstate):
    '''Initialise the link module: register commands, completion hooks and
    (when wxpython is available) the console menu skeleton.'''
    super(LinkModule, self).__init__(mpstate, "link", "link control", public=True, multi_vehicle=True)
    # "link" command with its subcommand grammar (placeholders in
    # parentheses are resolved by the completion functions below)
    self.add_command('link', self.cmd_link, "link control",
                     ["<list|ports|resetstats>",
                      'add (SERIALPORT)',
                      'attributes (LINK) (ATTRIBUTES)',
                      'remove (LINKS)',
                      'dataratelogging (DLSTATE)',
                      'hl (HLSTATE)'])
    self.add_command('vehicle', self.cmd_vehicle, "vehicle control")
    self.add_command('alllinks', self.cmd_alllinks, "send command on all links", ["(COMMAND)"])
    # message types that are never forwarded to other links
    self.no_fwd_types = set()
    self.no_fwd_types.add("BAD_DATA")
    self.add_completion_function('(SERIALPORT)', self.complete_serial_ports)
    self.add_completion_function('(LINKS)', self.complete_links)
    self.add_completion_function('(LINK)', self.complete_links)
    self.add_completion_function('(HLSTATE)', self.complete_hl)
    self.add_completion_function('(DLSTATE)', self.complete_dl)
    self.last_altitude_announce = 0.0  # last spoken altitude (altreadout)
    self.vehicle_list = set()          # vehicles seen across all links
    self.high_latency = False          # HIGH_LATENCY2 mode currently on?
    self.datarate_logging = False      # False/None, or CSV log file path
    self.datarate_logging_timer = mavutil.periodic_event(1)  # 1 Hz tick
    self.old_streamrate = 0            # stream rates saved while hl is on
    self.old_streamrate2 = 0
    self.menu_added_console = False    # console menu installed yet?
    if mp_util.has_wxpython:
        self.menu_rm = MPMenuSubMenu('Remove', items=[])
        self.menu = MPMenuSubMenu('Link',
                                  items=[MPMenuItem('Add...', 'Add...', '# link add ', handler=MPMenulinkAddDialog()),
                                         self.menu_rm,
                                         MPMenuItem('Ports', 'Ports', '# link ports'),
                                         MPMenuItem('List', 'List', '# link list'),
                                         MPMenuItem('Status', 'Status', '# link')])
        self.last_menu_update = 0
def idle_task(self):
    '''called on idle'''
    # install the console "Link" menu once the console module appears
    if mp_util.has_wxpython:
        if self.module('console') is not None:
            if not self.menu_added_console:
                self.menu_added_console = True
                # we don't dynamically update these yet due to a wx bug
                self.menu_rm.items = [ MPMenuItem(p, p, '# link remove %s' % p) for p in self.complete_links('') ]
                self.module('console').add_menu(self.menu)
        else:
            self.menu_added_console = False
    # keep every master link's source sysid/compid aligned with settings
    for m in self.mpstate.mav_master:
        m.source_system = self.settings.source_system
        m.mav.srcSystem = m.source_system
        m.mav.srcComponent = self.settings.source_component
    # don't let pending statustext wait forever for last chunk:
    for src in self.status.statustexts_by_sysidcompid:
        msgids = list(self.status.statustexts_by_sysidcompid[src].keys())
        for msgid in msgids:
            pending = self.status.statustexts_by_sysidcompid[src][msgid]
            # flush any statustext idle for more than one second
            if time.time() - pending.last_chunk_time > 1:
                self.emit_accumulated_statustext(src, msgid, pending)
    # datarate logging if enabled, at 1 Hz
    if self.datarate_logging_timer.trigger() and self.datarate_logging:
        with open(self.datarate_logging, 'a') as logfile:
            for master in self.mpstate.mav_master:
                highest_msec_key = (self.target_system, self.target_component)
                # delay of this link behind the freshest link, in seconds
                linkdelay = (self.status.highest_msec.get(highest_msec_key, 0) - master.highest_msec.get(highest_msec_key, 0))*1.0e-3
                logfile.write(str(time.strftime("%H:%M:%S")) + "," +
                              str(self.link_label(master)) + "," +
                              str(master.linknum) + "," +
                              str(self.status.counters['MasterIn'][master.linknum]) + "," +
                              str(self.status.bytecounters['MasterIn'][master.linknum].total()) + "," +
                              str(linkdelay) + "," +
                              str(100 * round(master.packet_loss(), 3)) + "\n")
def complete_serial_ports(self, text):
    '''Return the device names of auto-detected serial ports.'''
    detected = mavutil.auto_detect_serial(preferred_list=preferred_ports)
    return [port.device for port in detected]
def complete_hl(self, text):
    '''Return the valid arguments for "link hl".'''
    return ['on', 'off']
def complete_dl(self, text):
    '''Return the valid arguments for "link dataratelogging".'''
    return ['on', 'off']
def complete_links(self, text):
    '''Return completion candidates for a link: every link's address plus
    its label when one has been set.'''
    try:
        ret = [ m.address for m in self.mpstate.mav_master ]
        for m in self.mpstate.mav_master:
            # BUG FIX: this loop previously re-appended m.address (already
            # collected above), duplicating every address in completions.
            if hasattr(m, 'label'):
                ret.append(m.label)
        return ret
    except Exception as e:
        # completion is best-effort; report and return nothing
        print("Caught exception: %s" % str(e))
def cmd_link(self, args):
    '''Dispatch the "link" command: status display plus the list/add/
    attributes/ports/remove/resetstats/hl/dataratelogging subcommands.'''
    if not args:
        # bare "link" shows per-link status
        self.show_link()
        return
    subcmd = args[0]
    if subcmd == "list":
        self.cmd_link_list()
    elif subcmd == "hl":
        self.cmd_hl(args[1:])
    elif subcmd == "dataratelogging":
        self.cmd_dl(args[1:])
    elif subcmd == "add":
        if len(args) != 2:
            print("Usage: link add LINK")
            print('Usage: e.g. link add 127.0.0.1:9876')
            print('Usage: e.g. link add 127.0.0.1:9876:{"label":"rfd900"}')
            return
        self.cmd_link_add(args[1:])
    elif subcmd == "attributes":
        if len(args) != 3:
            print("Usage: link attributes LINK ATTRIBUTES")
            print('Usage: e.g. link attributes rfd900 {"label":"bob"}')
            return
        self.cmd_link_attributes(args[1:])
    elif subcmd == "ports":
        self.cmd_link_ports()
    elif subcmd == "remove":
        if len(args) != 2:
            print("Usage: link remove LINK")
            return
        self.cmd_link_remove(args[1:])
    elif subcmd == "resetstats":
        self.reset_link_stats()
    else:
        print("usage: link <list|add|remove|attributes|hl|dataratelogging|resetstats>")
def cmd_dl(self, args):
    '''Turn per-link datarate logging on or off, or report its state.'''
    if not args:
        print("Datarate logging is " + ("on" if self.datarate_logging else "off"))
        return
    choice = args[0]
    if choice == "on":
        # store the log path in datarate_logging; idle_task appends to it
        self.datarate_logging = os.path.join(self.logdir, "dataratelog.csv")
        print("Datarate Logging ON, logfile: " + self.datarate_logging)
        # Open a new file handle (don't append) and write the CSV header
        with open(self.datarate_logging, 'w') as logfile:
            logfile.write("time, linkname, linkid, packetsreceived, bytesreceived, delaysec, lostpercent\n")
    elif choice == "off":
        print("Datarate Logging OFF")
        self.datarate_logging = None
    else:
        print("usage: dataratelogging <on|off>")
def cmd_hl(self, args):
    '''Toggle high latency mode'''
    if len(args) < 1:
        print("High latency mode is " + str(self.high_latency))
        return
    elif args[0] == "on":
        print("High latency mode ON")
        self.high_latency = True
        # Tell ArduPilot to start sending HIGH_LATENCY2 messages
        self.master.mav.command_long_send(
            self.target_system, # target_system
            self.target_component,
            mavutil.mavlink.MAV_CMD_SET_MESSAGE_INTERVAL, # command
            0, # confirmation
            mavutil.mavlink.MAVLINK_MSG_ID_HIGH_LATENCY2, # param1 (msg id)
            1000000, # param2 (message interval, us)
            0, # param3
            0, # param4
            0, # param5
            0, # param6
            0) # param7
        # and stop sending any other messages
        self.old_streamrate = self.settings.streamrate
        self.old_streamrate2 = self.settings.streamrate2
        self.settings.streamrate = -1
        self.settings.streamrate2 = -1
        for master in self.mpstate.mav_master:
            # rate 0 with start_stop=1 requests the streams to stop
            master.mav.request_data_stream_send(self.mpstate.settings.target_system, self.mpstate.settings.target_component,
                                                mavutil.mavlink.MAV_DATA_STREAM_ALL,
                                                0, 1)
        return
    elif args[0] == "off":
        print("High latency mode OFF")
        self.high_latency = False
        # Start sending the full message set again
        self.settings.streamrate = self.old_streamrate
        self.settings.streamrate2 = self.old_streamrate2
        for master in self.mpstate.mav_master:
            # link 0 uses streamrate; all other links use streamrate2
            if master.linknum == 0:
                rate = self.settings.streamrate
            else:
                rate = self.settings.streamrate2
            if rate != -1 and self.mpstate.settings.streamrate != -1:
                master.mav.request_data_stream_send(self.mpstate.settings.target_system, self.mpstate.settings.target_component,
                                                    mavutil.mavlink.MAV_DATA_STREAM_ALL,
                                                    rate, 1)
        # Tell ArduPilot to stop sending HIGH_LATENCY2 messages
        self.master.mav.command_long_send(
            self.target_system, # target_system
            self.target_component,
            mavutil.mavlink.MAV_CMD_SET_MESSAGE_INTERVAL, # command
            0, # confirmation
            mavutil.mavlink.MAVLINK_MSG_ID_HIGH_LATENCY2, # param1 (msg id)
            -1, # param2 (message interval)
            0, # param3
            0, # param4
            0, # param5
            0, # param6
            0) # param7
        return
    else:
        print("usage: hl <on|off>")
def show_link(self):
    '''show link information'''
    for master in self.mpstate.mav_master:
        highest_msec_key = (self.target_system, self.target_component)
        # delay of this link behind the freshest link, in seconds
        linkdelay = (self.status.highest_msec.get(highest_msec_key, 0) - master.highest_msec.get(highest_msec_key, 0))*1.0e-3
        if master.linkerror:
            status = "DOWN"
        else:
            status = "OK"
        sign_string = ''
        try:
            if master.mav.signing.sig_count:
                if master.mav.signing.secret_key is None:
                    # unsigned/reject counts are not updated if we
                    # don't have a signing secret
                    sign_string = ", (no-signing-secret)"
                else:
                    sign_string = ", unsigned %u reject %u" % (master.mav.signing.unsigned_count, master.mav.signing.reject_count)
        except AttributeError as e:
            # some mav objects may not have a "signing" attribute
            pass
        print("link %s %s (%u packets, %u bytes, %.2fs delay, %u lost, %.1f%% loss, rate:%uB/s%s)" % (self.link_label(master),
                                                                                                      status,
                                                                                                      self.status.counters['MasterIn'][master.linknum],
                                                                                                      self.status.bytecounters['MasterIn'][master.linknum].total(),
                                                                                                      linkdelay,
                                                                                                      master.mav_loss,
                                                                                                      master.packet_loss(),
                                                                                                      self.status.bytecounters['MasterIn'][master.linknum].rate(),
                                                                                                      sign_string))
def reset_link_stats(self):
    '''Zero the packet/byte counters and loss statistics on every link.'''
    for conn in self.mpstate.mav_master:
        self.status.counters['MasterIn'][conn.linknum] = 0
        # re-run the byte counter's constructor to clear it in place
        self.status.bytecounters['MasterIn'][conn.linknum].__init__()
        conn.mav_loss = 0
        conn.mav_count = 0
def cmd_alllinks(self, args):
    '''Run the given command once per known vehicle, then restore the
    previously selected target vehicle.'''
    saved_target = self.mpstate.settings.target_system
    print("Sending to: ", self.vehicle_list)
    command = ' '.join(args)
    for vehicle in sorted(self.vehicle_list):
        self.cmd_vehicle([str(vehicle)])
        self.mpstate.functions.process_stdin(command, True)
    self.cmd_vehicle([str(saved_target)])
def cmd_link_list(self):
    '''Print every link with its index, optional label and address.'''
    print("%u links" % len(self.mpstate.mav_master))
    # idiom: enumerate instead of range(len(...)) + indexing
    for i, conn in enumerate(self.mpstate.mav_master):
        if hasattr(conn, 'label'):
            print("%u (%s): %s" % (i, conn.label, conn.address))
        else:
            print("%u: %s" % (i, conn.address))
def parse_link_attributes(self, some_json):
    '''Parse ``some_json`` into a dict; print a message and return an
    empty dict when the JSON is invalid.'''
    try:
        return json.loads(some_json)
    except ValueError:
        print('Invalid JSON argument: {0}'.format(some_json))
    return {}
def parse_link_descriptor(self, descriptor):
    '''Split a descriptor such as 'udpin:127.0.0.1:9877:{"foo":"bar"}'
    into the pair ("udpin:127.0.0.1:9877", {"foo": "bar"}); the JSON
    attribute suffix is optional.'''
    device, sep, trailer = descriptor.partition(":{")
    attributes = {}
    if sep and trailer.endswith("}"):
        # re-attach the opening brace consumed by partition() and parse
        attributes = self.parse_link_attributes("{" + trailer)
    return (device, attributes)
def apply_link_attributes(self, conn, optional_attributes):
    '''Set each optional attribute (e.g. "label") on the connection object.'''
    # idiom: iterate items() instead of keys plus repeated dict lookups
    for attr, value in optional_attributes.items():
        print("Applying attribute to link: %s = %s" % (attr, value))
        setattr(conn, attr, value)
def link_add(self, descriptor, force_connected=False):
    '''add new link'''
    try:
        (device, optional_attributes) = self.parse_link_descriptor(descriptor)
        # if there's only 1 colon for port:baud
        # and if the first string is a valid serial port, it's a serial connection
        if len(device.split(':')) == 2:
            ports = mavutil.auto_detect_serial(preferred_list=preferred_ports)
            for p in ports:
                if p.device == device.split(':')[0]:
                    # it's a valid serial port, reformat arguments to fit
                    self.settings.baudrate = int(device.split(':')[1])
                    device = device.split(':')[0]
                    break
        print("Connect %s source_system=%d" % (device, self.settings.source_system))
        try:
            conn = mavutil.mavlink_connection(device, autoreconnect=True,
                                              source_system=self.settings.source_system,
                                              baud=self.settings.baudrate,
                                              force_connected=force_connected)
        except Exception as e:
            # try the same thing but without force-connected for
            # backwards-compatability
            conn = mavutil.mavlink_connection(device, autoreconnect=True,
                                              source_system=self.settings.source_system,
                                              baud=self.settings.baudrate)
        conn.mav.srcComponent = self.settings.source_component
    except Exception as msg:
        print("Failed to connect to %s : %s" % (descriptor, msg))
        return False
    if self.settings.rtscts:
        conn.set_rtscts(True)
    # route received (and, when supported, sent) messages through us
    conn.mav.set_callback(self.master_callback, conn)
    if hasattr(conn.mav, 'set_send_callback'):
        conn.mav.set_send_callback(self.master_send_callback, conn)
    # per-link bookkeeping used by the status and delay tracking
    conn.linknum = len(self.mpstate.mav_master)
    conn.linkerror = False
    conn.link_delayed = False
    conn.last_heartbeat = 0
    conn.last_message = 0
    conn.highest_msec = {}
    conn.target_system = self.settings.target_system
    self.apply_link_attributes(conn, optional_attributes)
    self.mpstate.mav_master.append(conn)
    self.status.counters['MasterIn'].append(0)
    self.status.bytecounters['MasterIn'].append(self.status.ByteCounter())
    self.mpstate.vehicle_link_map[conn.linknum] = set(())
    try:
        # keep forked child processes from holding this fd open
        mp_util.child_fd_list_add(conn.port.fileno())
    except Exception:
        pass
    return True
def cmd_link_add(self, args):
    '''Handle "link add": add the link described by args[0].'''
    print("Adding link %s" % args[0])
    self.link_add(args[0])
def link_attributes(self, link, attributes):
    '''Apply a JSON attributes string to the link identified by ``link``
    (a number, address or label).'''
    index = self.find_link(link)
    if index is None:
        print("Connection (%s) not found" % (link,))
        return
    target = self.mpstate.mav_master[index]
    parsed = self.parse_link_attributes(attributes)
    self.apply_link_attributes(target, parsed)
def cmd_link_attributes(self, args):
    '''Handle "link attributes LINK ATTRIBUTES".'''
    link, attributes = args[0], args[1]
    print("Setting link %s attributes (%s)" % (link, attributes))
    self.link_attributes(link, attributes)
def cmd_link_ports(self):
    '''Print the auto-detected serial ports with description and hw id.'''
    for port in mavutil.auto_detect_serial(preferred_list=preferred_ports):
        print("%s : %s : %s" % (port.device, port.description, port.hwid))
def find_link(self, device):
    '''Return the index of the link matching ``device`` by link number,
    address or label; return None when nothing matches.'''
    # idiom: enumerate instead of range(len(...)) + indexing
    for i, conn in enumerate(self.mpstate.mav_master):
        if (str(i) == device or
                conn.address == device or
                getattr(conn, 'label', None) == device):
            return i
    return None
def cmd_link_remove(self, args):
    '''remove an link'''
    device = args[0]
    # always keep at least one link open
    if len(self.mpstate.mav_master) <= 1:
        print("Not removing last link")
        return
    i = self.find_link(device)
    if i is None:
        return
    conn = self.mpstate.mav_master[i]
    print("Removing link %s" % conn.address)
    try:
        try:
            mp_util.child_fd_list_remove(conn.port.fileno())
        except Exception:
            pass
        self.mpstate.mav_master[i].close()
    except Exception as msg:
        # best-effort close; removal proceeds even if close fails
        print(msg)
        pass
    # drop the link from all per-link bookkeeping
    self.mpstate.mav_master.pop(i)
    self.status.counters['MasterIn'].pop(i)
    self.status.bytecounters['MasterIn'].pop(i)
    del self.mpstate.vehicle_link_map[conn.linknum]
    # renumber the links
    vehicle_link_map_reordered = {}
    for j in range(len(self.mpstate.mav_master)):
        # NOTE: `conn` is deliberately rebound to each remaining link here
        conn = self.mpstate.mav_master[j]
        map_old = self.mpstate.vehicle_link_map[conn.linknum]
        conn.linknum = j
        vehicle_link_map_reordered[j] = map_old
    self.mpstate.vehicle_link_map = vehicle_link_map_reordered
def get_usec(self):
    '''Return microseconds since the Unix epoch as an int.'''
    return int(1.0e6 * time.time())
def master_send_callback(self, m, master):
    '''called on sending a message'''
    # echo outgoing messages matched by the "watch" patterns to the console
    if self.status.watch is not None:
        for msg_type in self.status.watch:
            if fnmatch.fnmatch(m.get_type().upper(), msg_type.upper()):
                self.mpstate.console.writeln('> '+ str(m))
                break
    mtype = m.get_type()
    # log the outgoing message with a big-endian usec timestamp header
    if mtype != 'BAD_DATA' and self.mpstate.logqueue:
        usec = self.get_usec()
        # low two bits of the timestamp encode the link number
        usec = (usec & ~3) | 3 # linknum 3
        self.mpstate.logqueue.put(bytearray(struct.pack('>Q', usec) + m.get_msgbuf()))
def handle_msec_timestamp(self, m, master):
    '''special handling for MAVLink packets with a time_boot_ms field'''
    if m.get_type() == 'GLOBAL_POSITION_INT':
        # this is fix time, not boot time
        return
    msec = m.time_boot_ms
    if msec == 0:
        return
    sysid = m.get_srcSystem()
    compid = m.get_srcComponent()
    # timestamps are tracked per (sysid, compid) source
    highest_msec_key = (sysid,compid)
    highest = master.highest_msec.get(highest_msec_key, 0)
    if msec + 30000 < highest:
        # timestamp went more than 30s backwards: treat as a reboot and
        # reset the tracking on every link
        self.say('Time has wrapped')
        print('Time has wrapped', msec, highest)
        self.status.highest_msec[highest_msec_key] = msec
        for mm in self.mpstate.mav_master:
            mm.link_delayed = False
            mm.highest_msec[highest_msec_key] = msec
        return
    # we want to detect when a link is delayed
    master.highest_msec[highest_msec_key] = msec
    if msec > self.status.highest_msec.get(highest_msec_key, 0):
        self.status.highest_msec[highest_msec_key] = msec
    # a link is "delayed" when it lags the freshest link (and delay
    # checking is enabled and there is more than one link)
    if msec < self.status.highest_msec.get(highest_msec_key, 0) and len(self.mpstate.mav_master) > 1 and self.mpstate.settings.checkdelay:
        master.link_delayed = True
    else:
        master.link_delayed = False
def colors_for_severity(self, severity):
    '''Map a MAV_SEVERITY_* value to a (foreground, background) colour
    pair for console output; unknown severities fall back to white-on-red.'''
    severity_colors = {
        # tuple is (fg, bg) (as in "white on red")
        mavutil.mavlink.MAV_SEVERITY_EMERGENCY: ('white', 'red'),
        mavutil.mavlink.MAV_SEVERITY_ALERT: ('white', 'red'),
        mavutil.mavlink.MAV_SEVERITY_CRITICAL: ('white', 'red'),
        mavutil.mavlink.MAV_SEVERITY_ERROR: ('black', 'orange'),
        mavutil.mavlink.MAV_SEVERITY_WARNING: ('black', 'orange'),
        mavutil.mavlink.MAV_SEVERITY_NOTICE: ('black', 'yellow'),
        mavutil.mavlink.MAV_SEVERITY_INFO: ('white', 'green'),
        mavutil.mavlink.MAV_SEVERITY_DEBUG: ('white', 'green'),
    }
    try:
        return severity_colors[severity]
    # narrowed from a broad `except Exception`: only an unknown severity
    # (missing key) is expected here; other errors should not be hidden
    except KeyError as e:
        print("Exception: %s" % str(e))
        return ('white', 'red')
def report_altitude(self, altitude):
    '''possibly report a new altitude'''
    master = self.master
    if getattr(self.console, 'ElevationMap', None) is not None and self.mpstate.settings.basealt != 0:
        # adjust the altitude to the configured base altitude using the
        # terrain elevation at the vehicle's current position
        lat = master.field('GLOBAL_POSITION_INT', 'lat', 0)*1.0e-7
        lon = master.field('GLOBAL_POSITION_INT', 'lon', 0)*1.0e-7
        alt1 = self.console.ElevationMap.GetElevation(lat, lon)
        if alt1 is not None:
            alt2 = self.mpstate.settings.basealt
            altitude += alt2 - alt1
    self.status.altitude = altitude
    altitude_converted = self.height_convert_units(altitude)
    # announce only when the change exceeds the altreadout threshold
    if (int(self.mpstate.settings.altreadout) > 0 and
            math.fabs(altitude_converted - self.last_altitude_announce) >=
            int(self.settings.altreadout)):
        self.last_altitude_announce = altitude_converted
        # round to the nearest multiple of altreadout.
        # NOTE(review): under Python 3 the inner `/` is float division, so
        # rounded_alt is a float passed to "%u" -- confirm integer (//)
        # rounding was not intended here.
        rounded_alt = int(self.settings.altreadout) * ((self.settings.altreadout/2 + int(altitude_converted)) / int(self.settings.altreadout))
        self.say("height %u" % rounded_alt, priority='notification')
def emit_accumulated_statustext(self, key, id, pending):
    '''Print an accumulated STATUSTEXT message (colour-coded by severity)
    and remove it from the pending-statustext map. Messages identical to
    the last one are suppressed for 2 seconds.'''
    out = pending.accumulated_statustext()
    if out != self.status.last_apm_msg or time.time() > self.status.last_apm_msg_time+2:
        (fg, bg) = self.colors_for_severity(pending.severity)
        # (removed a redundant second accumulated_statustext() call here)
        self.mpstate.console.writeln("AP: %s" % out, bg=bg, fg=fg)
        self.status.last_apm_msg = out
        self.status.last_apm_msg_time = time.time()
    del self.status.statustexts_by_sysidcompid[key][id]
    def master_msg_handling(self, m, master):
        '''link message handling for an upstream link

        Dispatches on the message type: tracks heartbeat/mode/arm state,
        reassembles chunked STATUSTEXT messages, announces GPS lock state,
        distance-to-waypoint and altitude, and echoes COMMAND_ACK /
        MISSION_ACK results to the console.
        '''
        if self.settings.target_system != 0 and m.get_srcSystem() != self.settings.target_system:
            # don't process messages not from our target
            if m.get_type() == "BAD_DATA":
                if self.mpstate.settings.shownoise and mavutil.all_printable(m.data):
                    out = m.data
                    if type(m.data) == bytearray:
                        out = m.data.decode('ascii')
                    self.mpstate.console.write(out, bg='red')
            return
        if self.settings.target_system != 0 and master.target_system != self.settings.target_system:
            # keep the pymavlink level target system aligned with the MAVProxy setting
            master.target_system = self.settings.target_system
        if self.settings.target_component != 0 and master.target_component != self.settings.target_component:
            # keep the pymavlink level target component aligned with the MAVProxy setting
            print("change target_component %u" % self.settings.target_component)
            master.target_component = self.settings.target_component
        mtype = m.get_type()
        if (mtype == 'HEARTBEAT' or mtype == 'HIGH_LATENCY2') and m.type != mavutil.mavlink.MAV_TYPE_GCS:
            # adopt the first non-GCS system seen when no target is configured
            if self.settings.target_system == 0 and self.settings.target_system != m.get_srcSystem():
                self.settings.target_system = m.get_srcSystem()
                self.say("online system %u" % self.settings.target_system,'message')
                for mav in self.mpstate.mav_master:
                    mav.target_system = self.settings.target_system
            if self.status.heartbeat_error:
                self.status.heartbeat_error = False
                self.say("heartbeat OK")
            if master.linkerror:
                master.linkerror = False
                self.say("link %s OK" % (self.link_label(master)))
            self.status.last_heartbeat = time.time()
            master.last_heartbeat = self.status.last_heartbeat
            # announce arm/disarm transitions
            armed = self.master.motors_armed()
            if armed != self.status.armed:
                self.status.armed = armed
                if armed:
                    self.say("ARMED")
                else:
                    self.say("DISARMED")
            if master.flightmode != self.status.flightmode:
                self.status.flightmode = master.flightmode
                if self.mpstate.functions.input_handler is None:
                    self.set_prompt(self.status.flightmode + "> ")
            # rate-limit spoken mode announcements to one per 2 seconds
            if master.flightmode != self.status.last_mode_announced and time.time() > self.status.last_mode_announce + 2:
                self.status.last_mode_announce = time.time()
                self.status.last_mode_announced = master.flightmode
                self.say("Mode " + self.status.flightmode)
            # derive vehicle type/name from the heartbeat MAV_TYPE
            if m.type == mavutil.mavlink.MAV_TYPE_FIXED_WING:
                self.mpstate.vehicle_type = 'plane'
                self.mpstate.vehicle_name = 'ArduPlane'
            elif m.type in [mavutil.mavlink.MAV_TYPE_GROUND_ROVER,
                            mavutil.mavlink.MAV_TYPE_SURFACE_BOAT]:
                self.mpstate.vehicle_type = 'rover'
                self.mpstate.vehicle_name = 'APMrover2'
            elif m.type in [mavutil.mavlink.MAV_TYPE_SUBMARINE]:
                self.mpstate.vehicle_type = 'sub'
                self.mpstate.vehicle_name = 'ArduSub'
            elif m.type in [mavutil.mavlink.MAV_TYPE_QUADROTOR,
                            mavutil.mavlink.MAV_TYPE_COAXIAL,
                            mavutil.mavlink.MAV_TYPE_HEXAROTOR,
                            mavutil.mavlink.MAV_TYPE_OCTOROTOR,
                            mavutil.mavlink.MAV_TYPE_TRICOPTER,
                            mavutil.mavlink.MAV_TYPE_HELICOPTER,
                            mavutil.mavlink.MAV_TYPE_DODECAROTOR]:
                self.mpstate.vehicle_type = 'copter'
                self.mpstate.vehicle_name = 'ArduCopter'
            elif m.type in [mavutil.mavlink.MAV_TYPE_ANTENNA_TRACKER]:
                self.mpstate.vehicle_type = 'antenna'
                self.mpstate.vehicle_name = 'AntennaTracker'
            elif m.type in [mavutil.mavlink.MAV_TYPE_AIRSHIP]:
                self.mpstate.vehicle_type = 'blimp'
                self.mpstate.vehicle_name = 'Blimp'
        elif mtype == 'STATUSTEXT':
            # STATUSTEXT may arrive split into 50-byte chunks (mavlink
            # extensions chunk_seq/id); PendingText reassembles them
            class PendingText(object):
                def __init__(self):
                    self.expected_count = None
                    self.severity = None
                    self.chunks = {}
                    self.start_time = time.time()
                    self.last_chunk_time = time.time()
                def add_chunk(self, m): # m is a statustext message
                    self.severity = m.severity
                    self.last_chunk_time = time.time()
                    if hasattr(m, 'chunk_seq'):
                        # mavlink extensions are present.
                        chunk_seq = m.chunk_seq
                        mid = m.id
                    else:
                        # Note that m.id may still exist!  It will
                        # contain the value 253, STATUSTEXT's mavlink
                        # message id.  Thus our reliance on the
                        # presence of chunk_seq.
                        chunk_seq = 0
                        mid = 0
                    self.chunks[chunk_seq] = m.text
                    # a short (non-50-byte) chunk terminates the sequence
                    if len(m.text) != 50 or mid == 0:
                        self.expected_count = chunk_seq + 1;
                def complete(self):
                    return (self.expected_count is not None and
                            self.expected_count == len(self.chunks))
                def accumulated_statustext(self):
                    next_expected_chunk = 0
                    out = ""
                    for chunk_seq in sorted(self.chunks.keys()):
                        if chunk_seq != next_expected_chunk:
                            # mark holes left by lost chunks
                            out += " ... "
                            next_expected_chunk = chunk_seq
                        out += self.chunks[chunk_seq]
                        next_expected_chunk += 1
                    return out
            # pending texts are tracked per (sysid, compid) sender and id
            key = "%s.%s" % (m.get_srcSystem(), m.get_srcComponent())
            if key not in self.status.statustexts_by_sysidcompid:
                self.status.statustexts_by_sysidcompid[key] = {}
            if hasattr(m, 'chunk_seq'):
                mid = m.id
            else:
                # m.id will have the value of 253, STATUSTEXT mavlink id
                mid = 0
            if mid not in self.status.statustexts_by_sysidcompid[key]:
                self.status.statustexts_by_sysidcompid[key][mid] = PendingText()
            pending = self.status.statustexts_by_sysidcompid[key][mid]
            pending.add_chunk(m)
            if pending.complete():
                # we have all of the chunks!
                self.emit_accumulated_statustext(key, mid, pending)
        elif mtype == "VFR_HUD":
            # GPS_RAW fix_type 2 / GPS_RAW_INT fix_type 3 are treated as lock
            have_gps_lock = False
            if 'GPS_RAW' in self.status.msgs and self.status.msgs['GPS_RAW'].fix_type == 2:
                have_gps_lock = True
            elif 'GPS_RAW_INT' in self.status.msgs and self.status.msgs['GPS_RAW_INT'].fix_type == 3:
                have_gps_lock = True
            if have_gps_lock and not self.status.have_gps_lock and m.alt != 0:
                self.say("GPS lock at %u meters" % m.alt, priority='notification')
                self.status.have_gps_lock = True
        elif mtype == "GPS_RAW":
            # announce fix loss only after 3s without a lock, to avoid flapping
            if self.status.have_gps_lock:
                if m.fix_type != 2 and not self.status.lost_gps_lock and (time.time() - self.status.last_gps_lock) > 3:
                    self.say("GPS fix lost")
                    self.status.lost_gps_lock = True
                if m.fix_type == 2 and self.status.lost_gps_lock:
                    self.say("GPS OK")
                    self.status.lost_gps_lock = False
                if m.fix_type == 2:
                    self.status.last_gps_lock = time.time()
        elif mtype == "GPS_RAW_INT":
            # same as GPS_RAW but fix_type >= 3 means lock for this message
            if self.status.have_gps_lock:
                if m.fix_type < 3 and not self.status.lost_gps_lock and (time.time() - self.status.last_gps_lock) > 3:
                    self.say("GPS fix lost")
                    self.status.lost_gps_lock = True
                if m.fix_type >= 3 and self.status.lost_gps_lock:
                    self.say("GPS OK")
                    self.status.lost_gps_lock = False
                if m.fix_type >= 3:
                    self.status.last_gps_lock = time.time()
        elif mtype == "NAV_CONTROLLER_OUTPUT" and self.status.flightmode == "AUTO" and self.mpstate.settings.distreadout:
            # speak distance-to-waypoint in multiples of distreadout
            rounded_dist = int(m.wp_dist/self.mpstate.settings.distreadout)*self.mpstate.settings.distreadout
            if math.fabs(rounded_dist - self.status.last_distance_announce) >= self.mpstate.settings.distreadout:
                if rounded_dist != 0:
                    self.say("%u" % rounded_dist, priority="progress")
            self.status.last_distance_announce = rounded_dist
        elif mtype == "GLOBAL_POSITION_INT":
            # relative_alt is in millimetres
            self.report_altitude(m.relative_alt*0.001)
        elif mtype == "COMPASSMOT_STATUS":
            print(m)
        elif mtype == "SIMSTATE":
            self.mpstate.is_sitl = True
        elif mtype == "ATTITUDE":
            att_time = m.time_boot_ms * 0.001
            self.mpstate.attitude_time_s = max(self.mpstate.attitude_time_s, att_time)
            if self.mpstate.attitude_time_s - att_time > 120:
                # cope with wrap
                self.mpstate.attitude_time_s = att_time
        elif mtype == "COMMAND_ACK":
            try:
                # strip the "MAV_CMD_" / "MAV_RESULT_" prefixes for display
                cmd = mavutil.mavlink.enums["MAV_CMD"][m.command].name
                cmd = cmd[8:]
                res = mavutil.mavlink.enums["MAV_RESULT"][m.result].name
                res = res[11:]
                if m.target_component not in [mavutil.mavlink.MAV_COMP_ID_MAVCAN]:
                    self.mpstate.console.writeln("Got COMMAND_ACK: %s: %s" % (cmd, res))
            except Exception:
                self.mpstate.console.writeln("Got MAVLink msg: %s" % m)
            if m.command == mavutil.mavlink.MAV_CMD_PREFLIGHT_CALIBRATION:
                if m.result == mavutil.mavlink.MAV_RESULT_ACCEPTED:
                    self.say("Calibrated")
                elif m.result == mavutil.mavlink.MAV_RESULT_FAILED:
                    self.say("Calibration failed")
                elif m.result == mavutil.mavlink.MAV_RESULT_UNSUPPORTED:
                    self.say("Calibration unsupported")
                elif m.result == mavutil.mavlink.MAV_RESULT_TEMPORARILY_REJECTED:
                    self.say("Calibration temporarily rejected")
                else:
                    self.say("Calibration response (%u)" % m.result)
        elif mtype == "MISSION_ACK":
            try:
                # strip the "MAV_MISSION_" prefixes for display
                t = mavutil.mavlink.enums["MAV_MISSION_TYPE"][m.mission_type].name
                t = t[12:]
                res = mavutil.mavlink.enums["MAV_MISSION_RESULT"][m.type].name
                res = res[12:]
                self.mpstate.console.writeln("Got MISSION_ACK: %s: %s" % (t, res))
            except Exception as e:
                self.mpstate.console.writeln("Got MAVLink msg: %s" % m)
        else:
            #self.mpstate.console.writeln("Got MAVLink msg: %s" % m)
            pass
        # echo any watched message types to the console
        if self.status.watch is not None:
            for msg_type in self.status.watch:
                if fnmatch.fnmatch(mtype.upper(), msg_type.upper()):
                    self.mpstate.console.writeln('< '+ str(m))
                    break
def mavlink_packet(self, msg):
'''handle an incoming mavlink packet'''
pass
    def master_callback(self, m, master):
        '''process mavlink message m on master, sending any messages to recipients

        Pipeline: vehicle discovery, per-sysid output routing, logging,
        last-message bookkeeping, link-health tracking, then dispatch to
        master_msg_handling(), forwarding outputs and loaded modules.
        '''
        sysid = m.get_srcSystem()
        mtype = m.get_type()
        # vehicle discovery: remember each (sysid, compid) seen on this link
        if mtype in ['HEARTBEAT', 'HIGH_LATENCY2'] and m.type != mavutil.mavlink.MAV_TYPE_GCS:
            compid = m.get_srcComponent()
            if sysid not in self.vehicle_list:
                self.vehicle_list.add(sysid)
            if (sysid, compid) not in self.mpstate.vehicle_link_map[master.linknum]:
                self.mpstate.vehicle_link_map[master.linknum].add((sysid, compid))
                print("Detected vehicle {0}:{1} on link {2}".format(sysid, compid, master.linknum))
        # see if it is handled by a specialised sysid connection
        if sysid in self.mpstate.sysid_outputs:
            self.mpstate.sysid_outputs[sysid].write(m.get_msgbuf())
            if mtype == "GLOBAL_POSITION_INT":
                # let position-consuming modules see the secondary vehicle
                for modname in 'map', 'asterix', 'NMEA', 'NMEA2':
                    mod = self.module(modname)
                    if mod is not None:
                        mod.set_secondary_vehicle_position(m)
            return
        if getattr(m, '_timestamp', None) is None:
            master.post_message(m)
        self.status.counters['MasterIn'][master.linknum] += 1
        if mtype == 'GLOBAL_POSITION_INT':
            # send GLOBAL_POSITION_INT to 2nd GCS for 2nd vehicle display
            for sysid in self.mpstate.sysid_outputs:
                self.mpstate.sysid_outputs[sysid].write(m.get_msgbuf())
            if self.mpstate.settings.fwdpos:
                for link in self.mpstate.mav_master:
                    if link != master:
                        link.write(m.get_msgbuf())
        # and log them
        if mtype not in dataPackets and self.mpstate.logqueue:
            # put link number in bottom 2 bits, so we can analyse packet
            # delay in saved logs
            usec = self.get_usec()
            usec = (usec & ~3) | master.linknum
            self.mpstate.logqueue.put(bytearray(struct.pack('>Q', usec) + m.get_msgbuf()))
        # keep the last message of each type around
        self.status.msgs[mtype] = m
        instance_field = getattr(m, '_instance_field', None)
        if mtype not in self.status.msg_count:
            self.status.msg_count[mtype] = 0
        self.status.msg_count[mtype] += 1
        if instance_field is not None:
            # also track per-instance keys, e.g. "TYPE[<instance>]"
            instance_value = getattr(m, instance_field, None)
            if instance_value is not None:
                mtype_instance = "%s[%s]" % (mtype, instance_value)
                self.status.msgs[mtype_instance] = m
                if mtype_instance not in self.status.msg_count:
                    self.status.msg_count[mtype_instance] = 0
                self.status.msg_count[mtype_instance] += 1
        if m.get_srcComponent() == mavutil.mavlink.MAV_COMP_ID_GIMBAL and mtype == 'HEARTBEAT':
            # silence gimbal heartbeat packets for now
            return
        if getattr(m, 'time_boot_ms', None) is not None and self.settings.target_system == m.get_srcSystem():
            # update link_delayed attribute
            self.handle_msec_timestamp(m, master)
        if mtype in activityPackets:
            if master.linkerror:
                master.linkerror = False
                self.say("link %s OK" % (self.link_label(master)))
            self.status.last_message = time.time()
            master.last_message = self.status.last_message
        if master.link_delayed and self.mpstate.settings.checkdelay:
            # don't process delayed packets that cause double reporting
            if mtype in delayedPackets:
                return
        self.master_msg_handling(m, master)
        # don't pass along bad data
        if mtype != 'BAD_DATA':
            # pass messages along to listeners, except for REQUEST_DATA_STREAM, which
            # would lead a conflict in stream rate setting between mavproxy and the other
            # GCS
            if self.mpstate.settings.mavfwd_rate or mtype != 'REQUEST_DATA_STREAM':
                if mtype not in self.no_fwd_types:
                    for r in self.mpstate.mav_outputs:
                        r.write(m.get_msgbuf())
            sysid = m.get_srcSystem()
            target_sysid = self.target_system
            # pass to modules
            for (mod,pm) in self.mpstate.modules:
                if not hasattr(mod, 'mavlink_packet'):
                    continue
                # sysid 51/'3' is used by SiK radio for the injected RADIO/RADIO_STATUS mavlink frames.
                # In order to be able to pass these to e.g. the graph module, which is not multi-vehicle,
                # special handling is needed, so that the module gets both RADIO_STATUS and (single) target
                # vehicle information.
                if not(sysid == 51 and mtype in radioStatusPackets):
                    if not mod.multi_vehicle and sysid != target_sysid:
                        # only pass packets not from our target to modules that
                        # have marked themselves as being multi-vehicle capable
                        continue
                try:
                    mod.mavlink_packet(m)
                except Exception as msg:
                    # a broken module must not take down the link handler
                    if self.mpstate.settings.moddebug == 1:
                        print(msg)
                    elif self.mpstate.settings.moddebug > 1:
                        exc_type, exc_value, exc_traceback = sys.exc_info()
                        traceback.print_exception(exc_type, exc_value, exc_traceback,
                                                  limit=2, file=sys.stdout)
def cmd_vehicle(self, args):
'''handle vehicle commands'''
if len(args) < 1:
print("Usage: vehicle SYSID[:COMPID]")
return
a = args[0].split(':')
self.mpstate.settings.target_system = int(a[0])
if len(a) > 1:
self.mpstate.settings.target_component = int(a[1])
# change default link based on most recent HEARTBEAT
best_link = 0
best_timestamp = 0
for i in range(len(self.mpstate.mav_master)):
m = self.mpstate.mav_master[i]
m.target_system = self.mpstate.settings.target_system
m.target_component = self.mpstate.settings.target_component
if 'HEARTBEAT' in m.messages:
stamp = m.messages['HEARTBEAT']._timestamp
src_system = m.messages['HEARTBEAT'].get_srcSystem()
if stamp > best_timestamp:
best_link = i
best_timestamp = stamp
m.link_delayed = False
self.mpstate.settings.link = best_link + 1
print("Set vehicle %s (link %u)" % (args[0], best_link+1))
def init(mpstate):
    """MAVProxy module entry point: construct and return the link module."""
    return LinkModule(mpstate)
|
"""572. Idempotent matrices
https://projecteuler.net/problem=572
A matrix $M$ is called idempotent if $M^2 = M$.
Let $M$ be a three by three matrix: $M=\begin{pmatrix} a & b & c\\ d & e & f\\ g & h & i \end{pmatrix}$.
Let C(n) be the number of idempotent three by three matrices $M$ with integer
elements such that
$ -n \le a,b,c,d,e,f,g,h,i \le n$.
C(1)=164 and C(2)=848.
Find C(200).
"""
|
from __future__ import division
from __future__ import print_function
import tensorflow as tf
import numpy as np
from keras.datasets import mnist, cifar10, cifar100
from sklearn.preprocessing import LabelBinarizer
from nets import LeNet, LeNetVarDropout, VGG, VGGVarDropout
sess = tf.Session()
def main():
    """Train VGGVarDropout on the selected dataset and print test accuracy.

    Edit `dataset` below to one of 'mnist', 'cifar10' or 'cifar100'.
    Uses the module-level TensorFlow session `sess`.
    """
    dataset = 'cifar10' # mnist, cifar10, cifar100

    # Load the data
    # It will be downloaded first if necessary
    if dataset == 'mnist':
        (X_train, y_train), (X_test, y_test) = mnist.load_data()
        img_size = 28
        num_classes = 10
        num_channels = 1
    elif dataset == 'cifar10':
        (X_train, y_train), (X_test, y_test) = cifar10.load_data()
        img_size = 32
        num_classes = 10
        num_channels = 3
    elif dataset == 'cifar100':
        (X_train, y_train), (X_test, y_test) = cifar100.load_data()
        img_size = 32
        num_classes = 100
        num_channels = 3
    else:
        # fail fast; previously an unknown name crashed later with NameError
        raise ValueError("unknown dataset %r" % (dataset,))

    # one-hot encode the integer training labels
    lb = LabelBinarizer()
    lb.fit(y_train)
    y_train_one_hot = lb.transform(y_train)

    # scale pixels to [0, 1] and reshape to NHWC
    X_train = X_train.astype('float32')
    X_test = X_test.astype('float32')
    X_train = np.reshape(X_train, [-1, img_size, img_size, num_channels])
    X_test = np.reshape(X_test, [-1, img_size, img_size, num_channels])
    X_train /= 255
    X_test /= 255

    print('X_train shape:', X_train.shape)
    print(X_train.shape[0], 'train samples')
    print(X_test.shape[0], 'test samples')

    m = VGGVarDropout(img_size, num_channels, num_classes)

    sess.run(tf.global_variables_initializer())
    m.fit(X_train, y_train_one_hot, sess)

    # evaluate on the integer test labels
    pred = m.predict(X_test, sess)
    y_test = np.squeeze(y_test)
    acc = np.mean(np.equal(y_test, pred))
    print("\nTest accuracy: %.3f" % acc)
# Standard script entry point.
if __name__ == "__main__":
    main()
|
from sqlalchemy.sql import functions
from sqlalchemy.sql.selectable import FromClause
from sqlalchemy.sql.elements import ColumnClause
from sqlalchemy.ext.compiler import compiles
class FunctionColumn(ColumnClause):
    """A column drawn from the row-valued result of a SQL function.

    The function expression doubles as this column's `table`, and the
    compiler hook below renders the column as ``(function_expr).name``.
    """
    def __init__(self, function, name, type_=None):
        # the function acts as the "table" this column belongs to
        self.function = self.table = function
        self.name = self.key = name
        self.type_ = type_
        self.is_literal = False
    @property
    def _from_objects(self):
        # the function itself supplies the FROM clause, not this column
        return []
    def _make_proxy(self, selectable, name=None, attach=True,
                    name_is_truncatable=False, **kw):
        # export a plain ColumnClause onto the enclosing selectable,
        # recording this column as its proxied origin
        co = ColumnClause(self.name, self.type_)
        co.table = selectable
        co._proxies = [self]
        if selectable._is_clone_of is not None:
            # keep clone bookkeeping consistent with SQLAlchemy's internals
            co._is_clone_of = \
                selectable._is_clone_of.columns.get(co.key)
        if attach:
            selectable._columns[co.key] = co
        return co
@compiles(FunctionColumn)
def _compile_function_column(element, compiler, **kw):
    """Render a FunctionColumn as ``(<function SQL>).<quoted column name>``."""
    fn_sql = compiler.process(element.function, **kw)
    col_sql = compiler.preparer.quote(element.name)
    return "(%s).%s" % (fn_sql, col_sql)
class ColumnFunction(functions.FunctionElement):
    """A FunctionElement whose result is addressed column-wise.

    Assumes subclasses provide `column_names` as a sequence of
    (name, type) pairs -- confirm against callers; each pair becomes a
    FunctionColumn in `.columns`.
    """
    __visit_name__ = 'function'
    @property
    def columns(self):
        # reuse FromClause's lazily-built column collection machinery
        return FromClause.columns.fget(self)
    def _populate_column_collection(self):
        # invoked by the FromClause.columns property to fill the collection
        for name, t in self.column_names:
            self._columns[name] = FunctionColumn(self, name, t)
|
import discord
from discord.ext import commands
from .utils.chat_formatting import escape_mass_mentions, italics, pagify
from random import randint
from random import choice
from enum import Enum
from urllib.parse import quote_plus
import datetime
import time
import aiohttp
import asyncio
settings = {"POLL_DURATION" : 60}
class RPS(Enum):
    """Rock-paper-scissors moves, each valued by its display emoji."""
    rock = "\N{MOYAI}"
    paper = "\N{PAGE FACING UP}"
    scissors = "\N{BLACK SCISSORS}"
class RPSParser:
    """Convert a user-supplied string into an RPS move (case-insensitive).

    Used as a discord.ext.commands argument converter: any exception raised
    here makes the framework report a bad argument to the user.
    """
    def __init__(self, argument):
        argument = argument.lower()
        if argument == "rock":
            self.choice = RPS.rock
        elif argument == "paper":
            self.choice = RPS.paper
        elif argument == "scissors":
            self.choice = RPS.scissors
        else:
            # a bare `raise` here had no active exception and produced a
            # confusing RuntimeError; raise an explicit error instead
            raise ValueError("choice must be rock, paper or scissors")
class General:
    """General commands."""
    def __init__(self, bot):
        self.bot = bot
        # author.id -> perf_counter() start time, used by the stopwatch command
        self.stopwatches = {}
        # canned magic 8-ball answers for the 8ball command
        self.ball = ["As I see it, yes", "It is certain", "It is decidedly so", "Most likely", "Outlook good",
                     "Signs point to yes", "Without a doubt", "Yes", "Yes – definitely", "You may rely on it", "Reply hazy, try again",
                     "Ask again later", "Better not tell you now", "Cannot predict now", "Concentrate and ask again",
                     "Don't count on it", "My reply is no", "My sources say no", "Outlook not so good", "Very doubtful"]
        # active NewPoll objects, at most one per channel
        self.poll_sessions = []
    @commands.command(hidden=True)
    async def ping(self):
        """Pong."""
        await self.bot.say("Pong.")
    @commands.command()
    async def choose(self, *choices):
        """Chooses between multiple choices.
        To denote multiple choices, you should use double quotes.
        """
        choices = [escape_mass_mentions(c) for c in choices]
        if len(choices) < 2:
            await self.bot.say('Not enough choices to pick from.')
        else:
            await self.bot.say(choice(choices))
    @commands.command(pass_context=True)
    async def roll(self, ctx, number : int = 100):
        """Rolls random number (between 1 and user choice)
        Defaults to 100.
        """
        author = ctx.message.author
        if number > 1:
            n = randint(1, number)
            await self.bot.say("{} :game_die: {} :game_die:".format(author.mention, n))
        else:
            await self.bot.say("{} Maybe higher than 1? ;P".format(author.mention))
    @commands.command(pass_context=True)
    async def flip(self, ctx, user : discord.Member=None):
        """Flips a coin... or a user.
        Defaults to coin.
        """
        if user != None:
            msg = ""
            if user.id == self.bot.user.id:
                # flipping the bot flips the requester instead
                user = ctx.message.author
                msg = "Nice try. You think this is funny? How about *this* instead:\n\n"
            # map the display name to upside-down glyphs, lowercase then uppercase
            char = "abcdefghijklmnopqrstuvwxyz"
            tran = "ɐqɔpǝɟƃɥᴉɾʞlɯuodbɹsʇnʌʍxʎz"
            table = str.maketrans(char, tran)
            name = user.display_name.translate(table)
            char = char.upper()
            tran = "∀qƆpƎℲפHIſʞ˥WNOԀQᴚS┴∩ΛMX⅄Z"
            table = str.maketrans(char, tran)
            name = name.translate(table)
            await self.bot.say(msg + "(╯°□°)╯︵ " + name[::-1])
        else:
            await self.bot.say("*flips a coin and... " + choice(["HEADS!*", "TAILS!*"]))
    @commands.command(pass_context=True)
    async def rps(self, ctx, your_choice : RPSParser):
        """Play rock paper scissors"""
        author = ctx.message.author
        player_choice = your_choice.choice
        red_choice = choice((RPS.rock, RPS.paper, RPS.scissors))
        # (player, bot) -> True when the player wins; ties handled below
        cond = {
                (RPS.rock, RPS.paper) : False,
                (RPS.rock, RPS.scissors) : True,
                (RPS.paper, RPS.rock) : True,
                (RPS.paper, RPS.scissors) : False,
                (RPS.scissors, RPS.rock) : False,
                (RPS.scissors, RPS.paper) : True
               }
        if red_choice == player_choice:
            outcome = None # Tie
        else:
            outcome = cond[(player_choice, red_choice)]
        if outcome is True:
            await self.bot.say("{} You win {}!"
                               "".format(red_choice.value, author.mention))
        elif outcome is False:
            await self.bot.say("{} You lose {}!"
                               "".format(red_choice.value, author.mention))
        else:
            await self.bot.say("{} We're square {}!"
                               "".format(red_choice.value, author.mention))
    @commands.command(name="8", aliases=["8ball"])
    async def _8ball(self, *, question : str):
        """Ask 8 ball a question
        Question must end with a question mark.
        """
        if question.endswith("?") and question != "?":
            await self.bot.say("`" + choice(self.ball) + "`")
        else:
            await self.bot.say("That doesn't look like a question.")
    @commands.command(aliases=["sw"], pass_context=True)
    async def stopwatch(self, ctx):
        """Starts/stops stopwatch"""
        author = ctx.message.author
        # first invocation starts the watch, second one stops and reports
        if not author.id in self.stopwatches:
            self.stopwatches[author.id] = int(time.perf_counter())
            await self.bot.say(author.mention + " Stopwatch started!")
        else:
            tmp = abs(self.stopwatches[author.id] - int(time.perf_counter()))
            tmp = str(datetime.timedelta(seconds=tmp))
            await self.bot.say(author.mention + " Stopwatch stopped! Time: **" + tmp + "**")
            self.stopwatches.pop(author.id, None)
    @commands.command()
    async def lmgtfy(self, *, search_terms : str):
        """Creates a lmgtfy link"""
        search_terms = escape_mass_mentions(search_terms.replace(" ", "+"))
        await self.bot.say("https://lmgtfy.com/?q={}".format(search_terms))
    @commands.command(no_pm=True, hidden=True)
    async def hug(self, user : discord.Member, intensity : int=1):
        """Because everyone likes hugs
        Up to 10 intensity levels."""
        name = italics(user.display_name)
        # kaomoji chosen by intensity bracket
        if intensity <= 0:
            msg = "(っ˘̩╭╮˘̩)っ" + name
        elif intensity <= 3:
            msg = "(っ´▽`)っ" + name
        elif intensity <= 6:
            msg = "╰(*´︶`*)╯" + name
        elif intensity <= 9:
            msg = "(つ≧▽≦)つ" + name
        elif intensity >= 10:
            msg = "(づ ̄ ³ ̄)づ{} ⊂(´・ω・`⊂)".format(name)
        await self.bot.say(msg)
    @commands.command(pass_context=True, no_pm=True)
    async def userinfo(self, ctx, *, user: discord.Member=None):
        """Shows users's informations"""
        author = ctx.message.author
        server = ctx.message.server
        if not user:
            # default to the command author
            user = author
        roles = [x.name for x in user.roles if x.name != "@everyone"]
        joined_at = self.fetch_joined_at(user, server)
        since_created = (ctx.message.timestamp - user.created_at).days
        since_joined = (ctx.message.timestamp - joined_at).days
        user_joined = joined_at.strftime("%d %b %Y %H:%M")
        user_created = user.created_at.strftime("%d %b %Y %H:%M")
        # 1-based position by join date among all members
        member_number = sorted(server.members,
                               key=lambda m: m.joined_at).index(user) + 1
        created_on = "{}\n({} days ago)".format(user_created, since_created)
        joined_on = "{}\n({} days ago)".format(user_joined, since_joined)
        game = "Chilling in {} status".format(user.status)
        if user.game is None:
            pass
        elif user.game.url is None:
            game = "Playing {}".format(user.game)
        else:
            game = "Streaming: [{}]({})".format(user.game, user.game.url)
        if roles:
            # order the user's roles by the server's role hierarchy
            roles = sorted(roles, key=[x.name for x in server.role_hierarchy
                                       if x.name != "@everyone"].index)
            roles = ", ".join(roles)
        else:
            roles = "None"
        data = discord.Embed(description=game, colour=user.colour)
        data.add_field(name="Joined Discord on", value=created_on)
        data.add_field(name="Joined this server on", value=joined_on)
        data.add_field(name="Roles", value=roles, inline=False)
        data.set_footer(text="Member #{} | User ID:{}"
                        "".format(member_number, user.id))
        name = str(user)
        name = " ~ ".join((name, user.nick)) if user.nick else name
        if user.avatar_url:
            data.set_author(name=name, url=user.avatar_url)
            data.set_thumbnail(url=user.avatar_url)
        else:
            data.set_author(name=name)
        try:
            await self.bot.say(embed=data)
        except discord.HTTPException:
            await self.bot.say("I need the `Embed links` permission "
                               "to send this")
    @commands.command(pass_context=True, no_pm=True)
    async def serverinfo(self, ctx):
        """Shows server's informations"""
        server = ctx.message.server
        # count online + idle members as "online"
        online = len([m.status for m in server.members
                      if m.status == discord.Status.online or
                      m.status == discord.Status.idle])
        total_users = len(server.members)
        text_channels = len([x for x in server.channels
                             if x.type == discord.ChannelType.text])
        voice_channels = len(server.channels) - text_channels
        passed = (ctx.message.timestamp - server.created_at).days
        created_at = ("Since {}. That's over {} days ago!"
                      "".format(server.created_at.strftime("%d %b %Y %H:%M"),
                                passed))
        # random embed colour
        colour = ''.join([choice('0123456789ABCDEF') for x in range(6)])
        colour = int(colour, 16)
        data = discord.Embed(
            description=created_at,
            colour=discord.Colour(value=colour))
        data.add_field(name="Region", value=str(server.region))
        data.add_field(name="Users", value="{}/{}".format(online, total_users))
        data.add_field(name="Text Channels", value=text_channels)
        data.add_field(name="Voice Channels", value=voice_channels)
        data.add_field(name="Roles", value=len(server.roles))
        data.add_field(name="Owner", value=str(server.owner))
        data.set_footer(text="Server ID: " + server.id)
        if server.icon_url:
            data.set_author(name=server.name, url=server.icon_url)
            data.set_thumbnail(url=server.icon_url)
        else:
            data.set_author(name=server.name)
        try:
            await self.bot.say(embed=data)
        except discord.HTTPException:
            await self.bot.say("I need the `Embed links` permission "
                               "to send this")
    @commands.command()
    async def urban(self, *, search_terms : str, definition_number : int=1):
        """Urban Dictionary search
        Definition number must be between 1 and 10"""
        def encode(s):
            return quote_plus(s, encoding='utf-8', errors='replace')
        # definition_number is just there to show up in the help
        # all this mess is to avoid forcing double quotes on the user
        search_terms = search_terms.split(" ")
        try:
            # a trailing integer selects which definition to show (1-based)
            if len(search_terms) > 1:
                pos = int(search_terms[-1]) - 1
                search_terms = search_terms[:-1]
            else:
                pos = 0
            if pos not in range(0, 11): # API only provides the
                pos = 0 # top 10 definitions
        except ValueError:
            pos = 0
        search_terms = "+".join([encode(s) for s in search_terms])
        url = "http://api.urbandictionary.com/v0/define?term=" + search_terms
        try:
            async with aiohttp.get(url) as r:
                result = await r.json()
            if result["list"]:
                definition = result['list'][pos]['definition']
                example = result['list'][pos]['example']
                defs = len(result['list'])
                msg = ("**Definition #{} out of {}:\n**{}\n\n"
                       "**Example:\n**{}".format(pos+1, defs, definition,
                                                 example))
                # split long definitions over multiple messages
                msg = pagify(msg, ["\n"])
                for page in msg:
                    await self.bot.say(page)
            else:
                await self.bot.say("Your search terms gave no results.")
        except IndexError:
            await self.bot.say("There is no definition #{}".format(pos+1))
        except:
            # NOTE(review): bare except hides network/JSON errors --
            # consider narrowing to aiohttp/ValueError
            await self.bot.say("Error.")
    @commands.command(pass_context=True, no_pm=True)
    async def poll(self, ctx, *text):
        """Starts/stops a poll
        Usage example:
        poll Is this a poll?;Yes;No;Maybe
        poll stop"""
        message = ctx.message
        if len(text) == 1:
            # a single "stop" argument ends the channel's poll
            if text[0].lower() == "stop":
                await self.endpoll(message)
                return
        if not self.getPollByChannel(message):
            check = " ".join(text).lower()
            if "@everyone" in check or "@here" in check:
                await self.bot.say("Nice try.")
                return
            p = NewPoll(message, " ".join(text), self)
            if p.valid:
                self.poll_sessions.append(p)
                await p.start()
            else:
                await self.bot.say("poll question;option1;option2 (...)")
        else:
            await self.bot.say("A poll is already ongoing in this channel.")
    async def endpoll(self, message):
        # ends the current channel's poll if the requester is its author
        if self.getPollByChannel(message):
            p = self.getPollByChannel(message)
            if p.author == message.author.id: # or isMemberAdmin(message)
                await self.getPollByChannel(message).endPoll()
            else:
                await self.bot.say("Only admins and the author can stop the poll.")
        else:
            await self.bot.say("There's no poll ongoing in this channel.")
    def getPollByChannel(self, message):
        # returns the active poll for the message's channel, or False
        for poll in self.poll_sessions:
            if poll.channel == message.channel:
                return poll
        return False
    async def check_poll_votes(self, message):
        # on_message listener: feed every non-bot message to the channel's poll
        if message.author.id != self.bot.user.id:
            if self.getPollByChannel(message):
                    self.getPollByChannel(message).checkAnswer(message)
    def fetch_joined_at(self, user, server):
        """Just a special case for someone special :^)"""
        if user.id == "96130341705637888" and server.id == "133049272517001216":
            return datetime.datetime(2016, 1, 10, 6, 8, 4, 443000)
        else:
            return user.joined_at
class NewPoll():
    """One poll in one channel: collects numbered votes until ended.

    Created by General.poll; registers itself in the cog's poll_sessions
    and removes itself when the poll ends.
    """
    def __init__(self, message, text, main):
        self.channel = message.channel
        self.author = message.author.id
        self.client = main.bot
        self.poll_sessions = main.poll_sessions
        # text format: "question;answer1;answer2[;...]"
        msg = [ans.strip() for ans in text.split(";")]
        if len(msg) < 2: # Needs at least one question and 2 choices
            self.valid = False
            return None
        else:
            self.valid = True
        self.already_voted = []
        self.question = msg[0]
        msg.remove(self.question)
        self.answers = {}
        i = 1
        for answer in msg: # {id : {answer, votes}}
            self.answers[i] = {"ANSWER" : answer, "VOTES" : 0}
            i += 1
    async def start(self):
        # announce the poll, then auto-close after POLL_DURATION seconds
        # unless it was stopped earlier (valid set to False)
        msg = "**POLL STARTED!**\n\n{}\n\n".format(self.question)
        for id, data in self.answers.items():
            msg += "{}. *{}*\n".format(id, data["ANSWER"])
        msg += "\nType the number to vote!"
        await self.client.send_message(self.channel, msg)
        await asyncio.sleep(settings["POLL_DURATION"])
        if self.valid:
            await self.endPoll()
    async def endPoll(self):
        # post the tally and deregister this poll
        self.valid = False
        msg = "**POLL ENDED!**\n\n{}\n\n".format(self.question)
        for data in self.answers.values():
            msg += "*{}* - {} votes\n".format(data["ANSWER"], str(data["VOTES"]))
        await self.client.send_message(self.channel, msg)
        self.poll_sessions.remove(self)
    def checkAnswer(self, message):
        # count a vote when the message is a valid answer number and the
        # author has not voted yet; non-numeric messages are ignored
        try:
            i = int(message.content)
            if i in self.answers.keys():
                if message.author.id not in self.already_voted:
                    data = self.answers[i]
                    data["VOTES"] += 1
                    self.answers[i] = data
                    self.already_voted.append(message.author.id)
        except ValueError:
            pass
def setup(bot):
    """Red cog entry point: attach the General cog and its vote listener."""
    cog = General(bot)
    bot.add_listener(cog.check_poll_votes, "on_message")
    bot.add_cog(cog)
|
import _thread
import RPi.GPIO as GPIO
import socket
import time
from time import sleep
from sys import exit
import datetime
# BCM pin numbers for the four zone relay outputs (active-low, see zone()).
Zones = [5, 6, 13, 19]
StatusLED = 16  # status LED output pin
CancelButton = 18  # manual cancel/rain-override switch input pin
WaterSensor = 10  # optional rain sensor input pin
Sensor = False  # set True when a rain sensor is actually wired up
isRaining = False  # shared flag, updated by checkSwitch()/rain()
defaultWaitDuration = 0  # currently unused default watering delay
def setup():
    """Configure GPIO pins and open the TCP command socket.

    Publishes `serversocket` as a module global used by mainRun() and
    destroy().
    """
    global serversocket  # the unused name `t` was dropped from this statement
    # Setup GPIO
    GPIO.setmode(GPIO.BCM)
    GPIO.setwarnings(True)
    # Input Cancel Button
    GPIO.setup(CancelButton, GPIO.IN, pull_up_down=GPIO.PUD_UP)
    # Input Rain Sensor (only when one is fitted)
    if Sensor:
        GPIO.setup(WaterSensor, GPIO.IN, pull_up_down=GPIO.PUD_UP)
    # Setup 4 zones on GPIO; relays are active-low, so HIGH = all zones OFF
    for i in Zones:
        GPIO.setup(i, GPIO.OUT)
        GPIO.output(i, GPIO.HIGH)
    # Setup status LED
    GPIO.setup(StatusLED, GPIO.OUT)
    # Listening TCP socket for the remote-control protocol
    serversocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    host = socket.gethostname()
    port = 9999
    serversocket.bind((host, port))
    serversocket.listen(5)
    addLog("System", "Setup complete")
def mainRun():
    """Accept TCP commands forever and dispatch watering actions.

    Wire protocol (ASCII, colon-separated fields):
      WATER:<zone>:<minutes>  -- water a zone in a thread unless it's raining
      ZONE:<ON|OFF>:<zone>    -- force a zone relay on or off
      RainStatus              -- placeholder, prints "nothing" for now
      QUIT                    -- shut everything down via destroy()
    """
    global isRaining
    addLog("System", "Main Thread started")
    # Always check the switch
    _thread.start_new_thread(checkSwitch, ((),))
    while True:
        global serversocket
        clientsocket,addr = serversocket.accept()
        fromClient = clientsocket.recv(1024)
        clientsocket.close()
        strFromClient = str(fromClient.decode("ascii"))
        addLog("Recived", strFromClient)
        # Split incoming message
        requestType = strFromClient.split(":")
        # Do something with that message
        # What was the command?
        if(requestType[0] == "WATER"):
            # Is it raining
            if(isRaining == False):
                # Turn off LED if it was raining
                statusLED("off")
                # Start watering
                _thread.start_new_thread(water, (requestType[1], requestType[2], ) )
        elif(requestType[0] == "ZONE"):
            if(requestType[1] == "ON"):
                zone(int(requestType[2]), "ON")
            else:
                zone(int(requestType[2]), "OFF")
        elif(requestType[0] == "RainStatus"):
            # Some day we will send something back
            print("nothing")
        elif(requestType[0] == "QUIT"):
            destroy()
def checkSwitch(self):
    """Poll the cancel switch every 2s and mirror it into isRaining.

    NOTE(review): `self` is a throwaway parameter -- this is a plain
    function started by _thread.start_new_thread(checkSwitch, ((),)),
    which passes a dummy tuple argument; keep the signature as-is.
    """
    global isRaining
    while True:
        state = GPIO.input(CancelButton)
        if(state):
            # only log/update on a state change
            if(state != isRaining):
                addLog("System", "Switch TRUE")
                statusLED("solid")
                isRaining = True
        else:
            if(state != isRaining):
                addLog("System", "Switch FALSE")
                statusLED("off")
                isRaining = False
        sleep(2)
def water(zoneNum, duration):
    """Run one watering cycle: zone on, wait `duration` minutes, zone off."""
    selected = int(zoneNum)
    zone(selected, "ON")
    # "on" is not a state statusLED() handles, so this call is a no-op;
    # kept for parity with the original behavior
    statusLED("on")
    minutes = int(duration)
    sleep(minutes * 60)
    zone(selected, "OFF")
    statusLED("off")
def zone(zoneSelect, onoff):
    """Drive one zone's relay; relays are active-low (0 = energized/ON)."""
    energize = onoff == "ON"
    GPIO.output(Zones[zoneSelect], 0 if energize else 1)
    addLog('Zone ' + str(zoneSelect), 'ON' if energize else 'OFF')
def rain():
    """Sample the rain sensor (when fitted) and update the global flag."""
    global isRaining
    if Sensor:
        # Sensor input is active-high: any reading means rain.
        isRaining = bool(GPIO.input(WaterSensor))
def statusLED(status):
    """Set the status LED: 'blink' one on/off cycle, 'solid' on, 'off' out."""
    if status == "blink":
        # One full blink cycle (0.5 s on, 0.5 s off).
        for level in (GPIO.HIGH, GPIO.LOW):
            GPIO.output(StatusLED, level)
            sleep(0.5)
    elif status == "solid":
        GPIO.output(StatusLED, GPIO.HIGH)
    elif status == "off":
        GPIO.output(StatusLED, GPIO.LOW)
def addLog(currentZone, addedText):
    """Print a timestamped log line: '<now>: <source>: <message>'."""
    print("{0}: {1}: {2}".format(datetime.datetime.now(), currentZone, addedText))
def destroy():
    """Shut down cleanly: close the listening socket, de-energize every zone,
    turn the status LED off, log, and exit the process."""
    global serversocket
    serversocket.shutdown(socket.SHUT_RDWR)
    # Relays are active-low in this script (setup() writes HIGH for "off",
    # zone(..., "ON") writes 0).  The original wrote LOW here, which would
    # have energized every valve on shutdown; drive HIGH to switch them off.
    for i in Zones:
        GPIO.output(i, GPIO.HIGH)
    GPIO.output(StatusLED, GPIO.LOW)
    addLog('System', 'Sprinkler Script OFF')
    exit()
if __name__ == '__main__':
    setup()
    try:
        mainRun()
    except KeyboardInterrupt:
        # Ctrl-C: shut valves and close the socket before cleanup.
        destroy()
    finally:
        GPIO.cleanup()
        exit()
else:
    # NOTE(review): running destroy() on *import* looks unintentional --
    # importing this module would immediately shut everything down and call
    # exit().  Confirm before reusing this file as a library.
    destroy()
|
import os
import re
from time import sleep
from ansible.module_utils.cloud import CloudRetry
try:
import boto
import boto.ec2 #boto does weird import stuff
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
try:
import boto3
import botocore
HAS_BOTO3 = True
except:
HAS_BOTO3 = False
try:
from distutils.version import LooseVersion
HAS_LOOSE_VERSION = True
except:
HAS_LOOSE_VERSION = False
from ansible.module_utils.six import string_types, binary_type, text_type
class AnsibleAWSError(Exception):
    """Base exception raised by these AWS utility helpers."""
    pass
def _botocore_exception_maybe():
    """Return botocore's ClientError class when boto3 is installed.

    Falls back to ``type(None)`` so the result is always usable in an
    ``except`` clause (nothing is ever an instance of NoneType), letting
    these utils load without boto3 being present.
    """
    if not HAS_BOTO3:
        return type(None)
    return botocore.exceptions.ClientError
class AWSRetry(CloudRetry):
    """CloudRetry subclass that understands botocore ClientError responses."""
    base_class = _botocore_exception_maybe()

    @staticmethod
    def status_code_from_exception(error):
        """Extract the AWS error code string from a botocore ClientError."""
        return error.response['Error']['Code']

    @staticmethod
    def found(response_code):
        """Return True when the given AWS error code warrants a retry."""
        # This list of failures is based on this API Reference
        # http://docs.aws.amazon.com/AWSEC2/latest/APIReference/errors-overview.html
        retry_on = [
            'RequestLimitExceeded', 'Unavailable', 'ServiceUnavailable',
            'InternalFailure', 'InternalError'
        ]
        # Codes such as 'InvalidInstanceID.NotFound' are retried too, since
        # describe calls can race resource creation.  The dot is now escaped:
        # the original pattern '^\w+.NotFound' matched any character there.
        not_found = re.compile(r'^\w+\.NotFound')
        return response_code in retry_on or bool(not_found.search(response_code))
def boto3_conn(module, conn_type=None, resource=None, region=None, endpoint=None, **params):
    """Module-facing wrapper around _boto3_conn.

    Translates the ValueError raised for a bad conn_type into a module
    failure instead of a traceback.
    """
    try:
        return _boto3_conn(conn_type=conn_type, resource=resource, region=region, endpoint=endpoint, **params)
    except ValueError:
        module.fail_json(msg='There is an issue in the code of the module. You must specify either both, resource or client to the conn_type parameter in the boto3_conn function call')
def _boto3_conn(conn_type=None, resource=None, region=None, endpoint=None, **params):
    """Build a boto3 client, resource, or both from a single Session.

    :param conn_type: 'client', 'resource', or 'both'
    :param resource: boto3 service name, e.g. 'ec2' or 's3'
    :param region: region name passed through as region_name
    :param endpoint: custom endpoint URL, if any
    :raises ValueError: when conn_type is not one of the accepted values
    """
    profile = params.pop('profile_name', None)
    if conn_type not in ('both', 'resource', 'client'):
        raise ValueError('There is an issue in the calling code. You '
                         'must specify either both, resource, or client to '
                         'the conn_type parameter in the boto3_conn function '
                         'call')
    # One Session serves every case; the original built a second, redundant
    # Session with identical arguments when conn_type == 'both'.
    session = boto3.session.Session(profile_name=profile)
    if conn_type == 'resource':
        return session.resource(resource, region_name=region, endpoint_url=endpoint, **params)
    if conn_type == 'client':
        return session.client(resource, region_name=region, endpoint_url=endpoint, **params)
    client = session.client(resource, region_name=region, endpoint_url=endpoint, **params)
    resource_obj = session.resource(resource, region_name=region, endpoint_url=endpoint, **params)
    return client, resource_obj
# Backwards-compatible alias used by inventory scripts.
boto3_inventory_conn = _boto3_conn
def aws_common_argument_spec():
    """Return the argument-spec entries shared by every AWS module."""
    return {
        'ec2_url': dict(),
        'aws_secret_key': dict(aliases=['ec2_secret_key', 'secret_key'], no_log=True),
        'aws_access_key': dict(aliases=['ec2_access_key', 'access_key']),
        'validate_certs': dict(default=True, type='bool'),
        'security_token': dict(aliases=['access_token'], no_log=True),
        'profile': dict(),
    }
def ec2_argument_spec():
    """Return the common AWS spec extended with the EC2 region argument."""
    spec = aws_common_argument_spec()
    spec['region'] = dict(aliases=['aws_region', 'ec2_region'])
    return spec
def get_aws_connection_info(module, boto3=False):
    """Resolve region, endpoint URL, and credentials for an AWS module.

    Precedence for every value: explicit module argument, then environment
    variables (AWS_* before EC2_*), then boto/botocore configuration.
    Returns (region, ec2_url, boto_params) where boto_params is shaped for
    boto3 or boto2 depending on the ``boto3`` flag.
    """
    # Check module args for credentials, then check environment vars
    # access_key
    ec2_url = module.params.get('ec2_url')
    access_key = module.params.get('aws_access_key')
    secret_key = module.params.get('aws_secret_key')
    security_token = module.params.get('security_token')
    region = module.params.get('region')
    profile_name = module.params.get('profile')
    validate_certs = module.params.get('validate_certs')
    if not ec2_url:
        if 'AWS_URL' in os.environ:
            ec2_url = os.environ['AWS_URL']
        elif 'EC2_URL' in os.environ:
            ec2_url = os.environ['EC2_URL']
    if not access_key:
        if 'AWS_ACCESS_KEY_ID' in os.environ:
            access_key = os.environ['AWS_ACCESS_KEY_ID']
        elif 'AWS_ACCESS_KEY' in os.environ:
            access_key = os.environ['AWS_ACCESS_KEY']
        elif 'EC2_ACCESS_KEY' in os.environ:
            access_key = os.environ['EC2_ACCESS_KEY']
        else:
            # in case access_key came in as empty string
            access_key = None
    if not secret_key:
        if 'AWS_SECRET_ACCESS_KEY' in os.environ:
            secret_key = os.environ['AWS_SECRET_ACCESS_KEY']
        elif 'AWS_SECRET_KEY' in os.environ:
            secret_key = os.environ['AWS_SECRET_KEY']
        elif 'EC2_SECRET_KEY' in os.environ:
            secret_key = os.environ['EC2_SECRET_KEY']
        else:
            # in case secret_key came in as empty string
            secret_key = None
    if not region:
        if 'AWS_REGION' in os.environ:
            region = os.environ['AWS_REGION']
        elif 'AWS_DEFAULT_REGION' in os.environ:
            region = os.environ['AWS_DEFAULT_REGION']
        elif 'EC2_REGION' in os.environ:
            region = os.environ['EC2_REGION']
        else:
            if not boto3:
                # boto.config.get returns None if config not found
                region = boto.config.get('Boto', 'aws_region')
                if not region:
                    region = boto.config.get('Boto', 'ec2_region')
            elif HAS_BOTO3:
                # here we don't need to make an additional call, will default to 'us-east-1' if the below evaluates to None.
                region = botocore.session.get_session().get_config_variable('region')
            else:
                module.fail_json(msg="Boto3 is required for this module. Please install boto3 and try again")
    if not security_token:
        if 'AWS_SECURITY_TOKEN' in os.environ:
            security_token = os.environ['AWS_SECURITY_TOKEN']
        elif 'AWS_SESSION_TOKEN' in os.environ:
            security_token = os.environ['AWS_SESSION_TOKEN']
        elif 'EC2_SECURITY_TOKEN' in os.environ:
            security_token = os.environ['EC2_SECURITY_TOKEN']
        else:
            # in case security_token came in as empty string
            security_token = None
    if HAS_BOTO3 and boto3:
        # boto3 spells these 'aws_session_token' and 'verify'.
        boto_params = dict(aws_access_key_id=access_key,
                           aws_secret_access_key=secret_key,
                           aws_session_token=security_token)
        boto_params['verify'] = validate_certs
        if profile_name:
            boto_params['profile_name'] = profile_name
    else:
        # boto2 spells them 'security_token' and 'validate_certs'.
        boto_params = dict(aws_access_key_id=access_key,
                           aws_secret_access_key=secret_key,
                           security_token=security_token)
        # only set profile_name if passed as an argument
        if profile_name:
            boto_params['profile_name'] = profile_name
        boto_params['validate_certs'] = validate_certs
    # Normalize any bytes credential values to text.
    for param, value in boto_params.items():
        if isinstance(value, binary_type):
            boto_params[param] = text_type(value, 'utf-8', 'strict')
    return region, ec2_url, boto_params
def get_ec2_creds(module):
    ''' for compatibility mode with old modules that don't/can't yet
        use ec2_connect method '''
    # Returns (url, access_key, secret_key, region) in the legacy order.
    region, ec2_url, boto_params = get_aws_connection_info(module)
    return ec2_url, boto_params['aws_access_key_id'], boto_params['aws_secret_access_key'], region
def boto_fix_security_token_in_profile(conn, profile_name):
    ''' monkey patch for boto issue boto/boto#2100 '''
    # boto ignores aws_security_token inside named profiles; copy it onto
    # the connection's credential provider manually.
    profile = 'profile ' + profile_name
    if boto.config.has_option(profile, 'aws_security_token'):
        conn.provider.set_security_token(boto.config.get(profile, 'aws_security_token'))
    return conn
def connect_to_aws(aws_module, region, **params):
    """Connect a boto2 service module (e.g. boto.ec2) to the given region.

    Raises AnsibleAWSError with a diagnostic when the region is unknown to
    the installed boto or the connection cannot be established.
    """
    conn = aws_module.connect_to_region(region, **params)
    if not conn:
        # Distinguish "boto doesn't know this region" from other failures.
        if region not in [aws_module_region.name for aws_module_region in aws_module.regions()]:
            raise AnsibleAWSError("Region %s does not seem to be available for aws module %s. If the region definitely exists, you may need to upgrade boto or extend with endpoints_path" % (region, aws_module.__name__))
        else:
            raise AnsibleAWSError("Unknown problem connecting to region %s for aws module %s." % (region, aws_module.__name__))
    if params.get('profile_name'):
        conn = boto_fix_security_token_in_profile(conn, params['profile_name'])
    return conn
def ec2_connect(module):
    """ Return an ec2 connection"""
    region, ec2_url, boto_params = get_aws_connection_info(module)
    # If we have a region specified, connect to its endpoint.
    if region:
        try:
            ec2 = connect_to_aws(boto.ec2, region, **boto_params)
        except (boto.exception.NoAuthHandlerFound, AnsibleAWSError) as e:
            module.fail_json(msg=str(e))
    # Otherwise, no region so we fallback to the old connection method
    elif ec2_url:
        try:
            ec2 = boto.connect_ec2_endpoint(ec2_url, **boto_params)
        except (boto.exception.NoAuthHandlerFound, AnsibleAWSError) as e:
            module.fail_json(msg=str(e))
    else:
        # Neither region nor endpoint: nothing to connect to.
        module.fail_json(msg="Either region or ec2_url must be specified")
    return ec2
def paging(pause=0, marker_property='marker'):
    """ Adds paging to boto retrieval functions that support a 'marker'
        this is configurable as not all boto functions seem to use the
        same name.

    :param pause: seconds to sleep between page fetches (0 = no pause)
    :param marker_property: attribute on the result carrying the next-page
        marker
    """
    from functools import wraps

    def wrapper(f):
        @wraps(f)  # preserve the wrapped function's name and docstring
        def page(*args, **kwargs):
            results = []
            marker = None
            while True:
                try:
                    new = f(*args, marker=marker, **kwargs)
                    marker = getattr(new, marker_property)
                    results.extend(new)
                    if not marker:
                        break
                    elif pause:
                        sleep(pause)
                except TypeError:
                    # Older versions of boto do not accept a marker
                    # parameter; fall back to a single unpaged call.
                    results = f(*args, **kwargs)
                    break
            return results
        return page
    return wrapper
def camel_dict_to_snake_dict(camel_dict):
    """Recursively convert a CamelCase-keyed dict to snake_case keys.

    Dict values and list elements are converted recursively; every other
    value is passed through untouched.
    """
    # Compiled once per call: the original re-imported `re` and recompiled
    # both patterns inside the nested helper for every single key.
    first_cap_re = re.compile('(.)([A-Z][a-z]+)')
    all_cap_re = re.compile('([a-z0-9])([A-Z])')

    def camel_to_snake(name):
        s1 = first_cap_re.sub(r'\1_\2', name)
        return all_cap_re.sub(r'\1_\2', s1).lower()

    def value_is_list(camel_list):
        checked_list = []
        for item in camel_list:
            if isinstance(item, dict):
                checked_list.append(camel_dict_to_snake_dict(item))
            elif isinstance(item, list):
                checked_list.append(value_is_list(item))
            else:
                checked_list.append(item)
        return checked_list

    snake_dict = {}
    for k, v in camel_dict.items():
        if isinstance(v, dict):
            snake_dict[camel_to_snake(k)] = camel_dict_to_snake_dict(v)
        elif isinstance(v, list):
            snake_dict[camel_to_snake(k)] = value_is_list(v)
        else:
            snake_dict[camel_to_snake(k)] = v
    return snake_dict
def snake_dict_to_camel_dict(snake_dict):
    """Recursively convert snake_case dict keys to camelCase.

    Dict values and list elements are converted recursively; any other
    value is returned unchanged.
    """

    def camel(words):
        # Empty segments (doubled underscores) survive as a literal '_'.
        parts = words.split('_')
        return parts[0] + ''.join(p.capitalize() or '_' for p in parts[1:])

    def camelize(node):
        if node is None:
            return
        converted = type(node)()
        if isinstance(node, dict):
            for key, value in node.items():
                converted[camel(key)] = camelize(value)
        elif isinstance(node, list):
            for element in node:
                converted.append(camelize(element))
        else:
            return node
        return converted

    return camelize(snake_dict)
def ansible_dict_to_boto3_filter_list(filters_dict):
    """ Convert an Ansible dict of filters to list of dicts that boto3 can use
    Args:
        filters_dict (dict): Dict of AWS filters.
    Basic Usage:
        >>> filters = {'some-aws-id': 'i-01234567'}
        >>> ansible_dict_to_boto3_filter_list(filters)
    Returns:
        List: List of AWS filters and their values
        [
            {
                'Name': 'some-aws-id',
                'Values': [
                    'i-01234567',
                ]
            }
        ]
    """
    filter_list = []
    for name, values in filters_dict.items():
        entry = {'Name': name}
        # A bare string becomes a one-element Values list; anything else is
        # assumed to already be a sequence.
        entry['Values'] = [values] if isinstance(values, string_types) else values
        filter_list.append(entry)
    return filter_list
def boto3_tag_list_to_ansible_dict(tags_list):
    """ Convert a boto3 list of resource tags to a flat dict of key:value pairs
    Args:
        tags_list (list): List of dicts representing AWS tags.
    Basic Usage:
        >>> tags_list = [{'Key': 'MyTagKey', 'Value': 'MyTagValue'}]
        >>> boto3_tag_list_to_ansible_dict(tags_list)
        {
            'MyTagKey': 'MyTagValue'
        }
    Returns:
        Dict: Dict of key:value pairs representing AWS tags
        {
            'MyTagKey': 'MyTagValue',
        }
    """
    tags_dict = {}
    for tag in tags_list:
        # Some AWS APIs return lowercase 'key'/'value' entries while the
        # EC2-style APIs return 'Key'/'Value'; accept either spelling.
        if 'key' in tag:
            tags_dict[tag['key']] = tag['value']
        elif 'Key' in tag:
            tags_dict[tag['Key']] = tag['Value']
    return tags_dict
def ansible_dict_to_boto3_tag_list(tags_dict):
    """ Convert a flat dict of key:value pairs representing AWS resource tags to a boto3 list of dicts
    Args:
        tags_dict (dict): Dict representing AWS resource tags.
    Basic Usage:
        >>> tags_dict = {'MyTagKey': 'MyTagValue'}
        >>> ansible_dict_to_boto3_tag_list(tags_dict)
    Returns:
        List: List of dicts containing tag keys and values
        [
            {
                'Key': 'MyTagKey',
                'Value': 'MyTagValue'
            }
        ]
    """
    return [{'Key': key, 'Value': value} for key, value in tags_dict.items()]
def get_ec2_security_group_ids_from_names(sec_group_list, ec2_connection, vpc_id=None, boto3=True):
    """ Return list of security group IDs from security group names. Note that security group names are not unique
    across VPCs. If a name exists across multiple VPCs and no VPC ID is supplied, all matching IDs will be returned. This
    will probably lead to a boto exception if you attempt to assign both IDs to a resource so ensure you wrap the call in
    a try block
    """

    def get_sg_name(sg, boto3):
        # boto3 returns plain dicts; boto2 returns SecurityGroup objects.
        if boto3:
            return sg['GroupName']
        else:
            return sg.name

    def get_sg_id(sg, boto3):
        if boto3:
            return sg['GroupId']
        else:
            return sg.id

    sec_group_id_list = []

    if isinstance(sec_group_list, string_types):
        sec_group_list = [sec_group_list]

    # Get all security groups, optionally restricted to one VPC.
    if boto3:
        if vpc_id:
            filters = [
                {
                    'Name': 'vpc-id',
                    'Values': [
                        vpc_id,
                    ]
                }
            ]
            all_sec_groups = ec2_connection.describe_security_groups(Filters=filters)['SecurityGroups']
        else:
            all_sec_groups = ec2_connection.describe_security_groups()['SecurityGroups']
    else:
        if vpc_id:
            filters = { 'vpc-id': vpc_id }
            all_sec_groups = ec2_connection.get_all_security_groups(filters=filters)
        else:
            all_sec_groups = ec2_connection.get_all_security_groups()

    unmatched = set(sec_group_list).difference(str(get_sg_name(all_sg, boto3)) for all_sg in all_sec_groups)
    sec_group_name_list = list(set(sec_group_list) - set(unmatched))

    if len(unmatched) > 0:
        # If we have unmatched names that look like an ID, assume they are.
        # Compile the pattern once instead of re-matching every element
        # twice with inline re.match calls (re is imported at module level).
        sg_id_re = re.compile(r'sg-[a-fA-F0-9]+$')
        sec_group_id_list[:] = [sg for sg in unmatched if sg_id_re.match(sg)]
        still_unmatched = [sg for sg in unmatched if not sg_id_re.match(sg)]
        if len(still_unmatched) > 0:
            raise ValueError("The following group names are not valid: %s" % ', '.join(still_unmatched))

    sec_group_id_list += [str(get_sg_id(all_sg, boto3)) for all_sg in all_sec_groups if str(get_sg_name(all_sg, boto3)) in sec_group_name_list]

    return sec_group_id_list
def sort_json_policy_dict(policy_dict):
    """ Sort any lists in an IAM JSON policy so that comparison of two policies with identical values but
    different orders will return true
    Args:
        policy_dict (dict): Dict representing IAM JSON policy.
    Basic Usage:
        >>> my_iam_policy = {'Principle': {'AWS':["31","7","14","101"]}
        >>> sort_json_policy_dict(my_iam_policy)
    Returns:
        Dict: Will return a copy of the policy as a Dict but any List will be sorted
    """

    def sort_list(values):
        normalized = []
        for entry in values:
            if isinstance(entry, dict):
                normalized.append(sort_json_policy_dict(entry))
            elif isinstance(entry, list):
                normalized.append(sort_list(entry))
            else:
                normalized.append(entry)
        # Dicts are not orderable in Python 3, so compare them by their
        # sorted (key, value) pairs instead.
        normalized.sort(key=lambda x: sorted(x.items()) if isinstance(x, dict) else x)
        return normalized

    return {
        key: sort_json_policy_dict(value) if isinstance(value, dict)
        else sort_list(value) if isinstance(value, list)
        else value
        for key, value in policy_dict.items()
    }
def map_complex_type(complex_type, type_map):
    """
    Allows to cast elements within a dictionary to a specific type
    Example of usage:
        DEPLOYMENT_CONFIGURATION_TYPE_MAP = {
            'maximum_percent': 'int',
            'minimum_healthy_percent': 'int'
        }
        deployment_configuration = map_complex_type(module.params['deployment_configuration'],
                                                    DEPLOYMENT_CONFIGURATION_TYPE_MAP)
    This ensures all keys within the root element are casted and valid integers
    """
    if complex_type is None:
        return
    new_type = type(complex_type)()
    if isinstance(complex_type, dict):
        for key in complex_type:
            if key in type_map:
                if isinstance(type_map[key], list):
                    new_type[key] = map_complex_type(
                        complex_type[key],
                        type_map[key][0])
                else:
                    new_type[key] = map_complex_type(
                        complex_type[key],
                        type_map[key])
            else:
                # Any key missing from type_map aborts the conversion and
                # returns the input unchanged (preserved original behavior).
                return complex_type
    elif isinstance(complex_type, list):
        for i in range(len(complex_type)):
            new_type.append(map_complex_type(
                complex_type[i],
                type_map))
    elif type_map:
        # globals()['__builtins__'] is a module when this file runs as a
        # script but a dict when imported; the original assumed dict
        # indexing and raised TypeError under script execution.
        builtins_ns = globals()['__builtins__']
        if not isinstance(builtins_ns, dict):
            builtins_ns = vars(builtins_ns)
        return builtins_ns[type_map](complex_type)
    return new_type
|
from setuptools import setup, find_packages
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
def readfile(fn):
    """Read fn (relative to this setup.py) and return its contents as text."""
    full_path = path.join(here, fn)
    with open(full_path, "r", encoding="utf-8") as handle:
        return handle.read()
setup(
name="usfm2osis",
packages=find_packages(exclude=["tests*"]),
version="0.6.1",
description="Tools for converting Bibles from USFM to OSIS XML",
author="Christopher C. Little",
author_email="chrisclittle+usfm2osis@gmail.com",
url="https://github.com/chrislit/usfm2osis",
download_url="https://github.com/chrislit/usfm2osis/archive/master.zip",
keywords=["OSIS", "USFM", "Bible"],
license="GPLv3+",
zip_safe=False,
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Development Status :: 4 - Beta",
"License :: OSI Approved :: GNU General Public License v3 or later \
(GPLv3+)",
"Operating System :: OS Independent",
"Natural Language :: English",
"Intended Audience :: Religion",
"Intended Audience :: Developers",
"Topic :: Religion",
"Topic :: Text Processing :: Markup :: XML",
],
long_description="\n\n".join([readfile(f) for f in ("README.rst",)]),
# scripts=['scripts/usfm2osis', 'scripts/usfmtags'],
package_data={"usfm2osis": ["schemas/*.xsd"]},
entry_points={
"console_scripts": [
"usfm2osis = usfm2osis.scripts.usfm2osis:main",
"usfmtags = usfm2osis.scripts.usfmtags:main",
]
},
)
|
import unittest
from tempfile import NamedTemporaryFile
import os
import numpy as np
from Orange.data import ContinuousVariable, DiscreteVariable
from Orange.data.io import CSVFormat
tab_file = """\
Feature 1\tFeature 2\tFeature 3
1.0 \t1.3 \t5
2.0 \t42 \t7
"""
csv_file = """\
Feature 1, Feature 2,Feature 3
1.0, 1.3, 5
2.0, 42, 7
"""
tab_file_nh = """\
1.0 \t1.3 \t5
2.0 \t42 \t7
"""
csv_file_nh = """\
1.0, 1.3, 5
2.0, 42, 7
"""
class TestTabReader(unittest.TestCase):
    """Smoke tests for CSVFormat reading tab- and comma-separated samples."""

    def read_easy(self, s, name):
        # Write the sample to a temp file, parse it, and check the inferred
        # domain.  delete=False because the reader reopens the file by name
        # (required on Windows); it is removed manually in the finally block.
        file = NamedTemporaryFile("wt", delete=False)
        filename = file.name
        try:
            file.write(s)
            file.close()
            table = CSVFormat().read_file(filename)
            f1, f2, f3 = table.domain.variables
            # NOTE(review): expects the first column to be inferred as
            # discrete and the others as continuous, with exactly one class
            # variable -- confirm against CSVFormat's type-inference rules.
            self.assertIsInstance(f1, DiscreteVariable)
            self.assertEqual(f1.name, name + "1")
            self.assertIsInstance(f2, ContinuousVariable)
            self.assertEqual(f2.name, name + "2")
            self.assertIsInstance(f3, ContinuousVariable)
            self.assertEqual(f3.name, name + "3")
            self.assertEqual(len(table.domain.class_vars), 1)
        finally:
            os.remove(filename)

    def test_read_tab(self):
        # Header and headerless variants should produce the same domain.
        self.read_easy(tab_file, "Feature ")
        self.read_easy(tab_file_nh, "Feature ")

    def test_read_csv(self):
        self.read_easy(csv_file, "Feature ")
        self.read_easy(csv_file_nh, "Feature ")
|
from __future__ import unicode_literals
import unicodecsv
from datetime import datetime, timedelta
from pytz import timezone
from django.core.urlresolvers import reverse
from django.http import HttpResponse
from django.views.generic import UpdateView, FormView, TemplateView, CreateView
from neonet.views import LoggedInMixin
from DamageReports import models
from DamageReports import forms
class DamageReports(LoggedInMixin, FormView):
    """List damage reports filtered by a from/to date range."""
    template_name = 'DamageReports/list.html'
    form_class = forms.DamageReportsDateFilter
    # NOTE(review): these class attributes are evaluated once at import time,
    # so the default range goes stale on a long-running process -- confirm
    # whether per-request defaults were intended.
    now = datetime.now(timezone('Europe/Warsaw'))
    yesterday = now - timedelta(days=1)
    initial = {'date_from': yesterday, 'date_to': now}

    def form_valid(self, form):
        # Re-render the same page with the filtered queryset (no redirect).
        reports = models.DamageReport.objects.select_related('commodity').filter(date__range=(
            form.cleaned_data['date_from'], form.cleaned_data['date_to']))
        return self.render_to_response(self.get_context_data(form=form, reports=reports))
class DamageReportsCreate(LoggedInMixin, CreateView):
    """Create a damage report, stamping it with the requesting user."""
    model = models.DamageReport
    template_name = 'DamageReports/create.html'
    form_class = forms.DamageReportForm
    # NOTE(review): evaluated at import time; the default date goes stale on
    # a long-running process.
    now = datetime.now(timezone('Europe/Warsaw'))
    initial = {'date': now}

    def get_success_url(self):
        return reverse('DamageReports:damage_reports_view')

    def form_valid(self, form):
        # Attach the logged-in user before saving (the form has no user field).
        report = form.save(commit=False)
        report.user = self.request.user
        report.save()
        # NOTE(review): super().form_valid() calls form.save() again, so the
        # instance is saved twice (second save is an UPDATE) -- confirm.
        return super(DamageReportsCreate, self).form_valid(form)
class DamageReportsUpdate(LoggedInMixin, UpdateView):
    """Edit an existing damage report."""
    model = models.DamageReport
    template_name = 'DamageReports/update.html'
    form_class = forms.DamageReportForm

    def get_success_url(self):
        return reverse('DamageReports:list')

    def get_initial(self):
        # Pre-fill the EAN field from the related commodity.
        initial = self.initial.copy()
        initial['ean'] = self.get_object().commodity.ean
        return initial
class DamageReportsExport(LoggedInMixin, FormView):
    """Export reports in a date range as a semicolon-delimited CSV download."""
    template_name = 'DamageReports/export.html'
    form_class = forms.DamageReportsDateFilter
    # NOTE(review): import-time defaults; stale on a long-running process.
    now = datetime.now(timezone('Europe/Warsaw'))
    yesterday = now - timedelta(days=1)
    initial = {'date_from': yesterday, 'date_to': now}

    def form_valid(self, form):
        response = HttpResponse(content_type='text/csv')
        response['content-disposition'] = 'attachment; filename="reports.csv.txt"'
        data = models.DamageReport.objects.\
            select_related('commodity', 'detection_time', 'category', 'further_action', 'user').\
            filter(date__range=(form.cleaned_data['date_from'], form.cleaned_data['date_to']))
        # unicodecsv expects a bytes delimiter (this is a Python 2 codebase).
        writer = unicodecsv.writer(response, delimiter=b';')
        if not data:
            writer.writerow('Nie znaleziono żadnych raportów')
        else:
            for report in data:
                # Leading/trailing empty cells keep the column layout of the
                # downstream spreadsheet template.
                row = ['', unicode(report.date), report.brand, report.commodity.__unicode__(), report.serial,
                       report.detection_time.detection_time, report.category.category, report.comments,
                       report.further_action.further_action, '', '',
                       (report.user.first_name + ' ' + report.user.last_name)
                       ]
                row = [element.strip() for element in row]
                writer.writerow(row)
        return response
class DamageReportsCharts(LoggedInMixin, TemplateView):
    """Render per-day counts of A/B/C category reports for charting."""
    template_name = 'DamageReports/charts.html'

    def get_context_data(self, **kwargs):
        context = super(DamageReportsCharts, self).get_context_data(**kwargs)
        context['chart'] = self._view()
        return context

    def _view(self):
        # One date->count mapping per category; the lowercased category name
        # ('a'/'b'/'c') selects which counter to bump via getattr below.
        self.a = {}
        self.b = {}
        self.c = {}
        objects = models.DamageReport.objects.select_related('category').order_by('-date')
        for report in objects:
            _date = report.day_str()
            if _date not in self.a:
                self.a[_date] = 0
            if _date not in self.b:
                self.b[_date] = 0
            if _date not in self.c:
                self.c[_date] = 0
            getattr(self, report.category.category.lower())[_date] += 1
        reports = [{'data': [], 'name': 'A'},
                   {'data': [], 'name': 'B'},
                   {'data': [], 'name': 'C'}]
        # Python 2 dict iteration: point order within a series is arbitrary.
        for k, v in self.a.iteritems():
            reports[0]['data'].append([k, v])
        for k, v in self.b.iteritems():
            reports[1]['data'].append([k, v])
        for k, v in self.c.iteritems():
            reports[2]['data'].append([k, v])
        return reports
|
'''
Scheduler essential class
'''
import types, socket
job_state = ['waiting','running','error','finished']
queue_state = ['active', 'hold']
host_state = ['up', 'down', 'error']
class BaseScheduler(object) :
    '''
    Base scheduler class: subclasses implement the submit/list/status/cancel
    operations for a concrete batch scheduler (Python 2 codebase).
    '''
    def __init__(self, conf={}, **kw) :
        '''
        Initialization
        '''
        # NOTE(review): mutable default `conf={}` is shared across calls;
        # harmless only while it is never mutated here.
        self.name = ''
        self.default_options = conf.get('default_options', {})
        self.job_env_vars = {}
    def submit(self, script, opts = {}, **kw) :
        # Submit a single job script; scheduler-specific.
        pass
    def submit_bulk(self, script, ntask, opts = {}, **kw) :
        # Submit an array of ntask jobs from one script; scheduler-specific.
        pass
    def list(self, filter = None, **kw):
        # List jobs, optionally filtered; scheduler-specific.
        pass
    def status(self, jid, **kw):
        # Query the status of one job id; scheduler-specific.
        pass
    def cancel(self, jid_list, **kw):
        # Cancel each job id in jid_list; scheduler-specific.
        pass
    def hosts(self, **kw) :
        '''
        This function only give a list of host(s) managed by this scheduler
        slot used / total may or may not be set for this function
        '''
        pass
    def queues(self, **kw) :
        '''This function will give both list of queue and host associated with each queue'''
        pass
    def job_script_var(self, script) :
        '''
        Substitute special character in job script with scheduler-specific job script environment
        @type script string
        @param script input job script
        @rtype string
        @return patched job script
        NOTE: rely on self.job_env_vars
        '''
        # Replaces every '@KEY@' placeholder with its mapped value
        # (iteritems: Python 2 dict iteration).
        new_script = script
        for key, value in self.job_env_vars.iteritems() :
            new_script = new_script.replace('@' + key + '@', value)
        return new_script
class Host(object) :
    '''A compute host known to a scheduler (Python 2 codebase).'''
    def __init__(self, **kw) :
        self.name = kw.get('name', '')
        # slot used and total is used to get number of job running in the host
        self.slot_used = kw.get('slot_used', 0)
        self.slot_total = kw.get('slot_total', 0)
        self.np = kw.get('np', 0)  # number of processors
        self.loadavg = kw.get('loadavg', 0)
        # Goes through the property setter so the value is validated.
        self.set_state(kw.get('state', 'down'))
    def get_state(self) :
        return self._state
    def set_state(self, state) :
        # Only values from the module-level host_state list are legal.
        assert state in host_state
        self._state = state
    state = property(get_state, set_state)
    def __repr__(self) :
        return '<Host %(name)s,%(np)d,%(slot_used)d/%(slot_total)d,%(_state)s,%(loadavg).1f>' % vars(self)
    def __eq__(self, other) :
        # if host is logically equal (by ip address and name).
        # Accepts another Host or a (unicode) string hostname.
        name = None
        if type(other) == types.StringType or type(other) == types.UnicodeType :
            name = other
        elif type(other) == type(self) :
            if hasattr(other, 'name') :
                name = other.name
        if name is None :
            return False
        try :
            # Compare fully-qualified names.  NOTE(review): getfqdn does a
            # resolver lookup per comparison, and the bare except turns any
            # resolver error into "not equal" -- confirm this is intended.
            result1 = socket.getfqdn(self.name)
            result2 = socket.getfqdn(name)
            return result1 == result2
        except :
            return False
    def __ne__(self, other) :
        return not self.__eq__(other)
class Queue(object) :
    '''A scheduler queue plus the hosts currently serving it.'''
    def __init__(self, **kw) :
        self.name = kw.get('name', '')
        self.slot_used = kw.get('slot_used', 0)
        self.slot_total = kw.get('slot_total', 0)
        self.loadavg = kw.get('loadavg', 0)
        # Host lists and state go through property setters for validation.
        self.set_online_hosts(kw.get('online_hosts', None))
        self.set_offline_hosts(kw.get('offline_hosts', None))
        self.set_state(kw.get('state', 'active'))
    def get_state(self) :
        return self._state
    def set_state(self, state) :
        # Only values from the module-level queue_state list are legal.
        assert state in queue_state
        self._state = state
    state = property(get_state, set_state)
    def get_online_hosts(self) :
        return self._online_hosts
    def set_online_hosts(self, online_hosts) :
        self._online_hosts = online_hosts
    online_hosts = property(get_online_hosts, set_online_hosts)
    def get_offline_hosts(self) :
        return self._offline_hosts
    def set_offline_hosts(self, offline_hosts) :
        self._offline_hosts = offline_hosts
    offline_hosts = property(get_offline_hosts, set_offline_hosts)
    def __repr__(self) :
        # Multi-line repr listing the queue followed by its on/offline hosts.
        retval = '<Q %(name)s,%(_state)s,%(slot_used)d,%(slot_total)d,%(loadavg).1f>' % vars(self)
        if self._online_hosts :
            for host in self._online_hosts :
                retval = retval + '\n\tOn:%s' % str(host)
        if self._offline_hosts :
            for host in self._offline_hosts :
                retval = retval + '\n\tOff:%s' % str(host)
        retval = retval + '\n'
        return retval
class JobInfo(object):
    '''A single job's identity, placement and lifecycle state.'''
    def __init__(self, **kw):
        self.jid = kw.get('jid', None)    # scheduler job id
        self.tid = kw.get('tid', None)    # task id within an array job
        self.name = kw.get('name', '')
        self.owner = kw.get('owner', '')
        self.queue = kw.get('queue', '')
        self.account = kw.get('account', '')
        self._np = kw.get('np', 1)
        # has_key: Python 2 idiom; route through the setters so np is
        # coerced to int and state is validated against job_state.
        if kw.has_key('np') :
            self.set_np(kw['np'])
        self._state = 'waiting'
        if kw.has_key('state') :
            self.set_state(kw['state'])
        self.host = None
        self.submittime = kw.get('submittime', None)
        self.starttime = kw.get('starttime', None)
        self.scheduler = kw.get('scheduler', '')
        self.scheduler_host = kw.get('scheduler_host', '')
    def __repr__(self):
        return '<job %s,%s,%s,%s,%s,%d,%s,%s,%s,%s,%s,%s>' % \
            (self.jid,self.tid,self.name,self.owner,self.queue,self.np,self.state,self.scheduler,self.scheduler_host, self.host,self.submittime,self.starttime)
    def get_np(self):
        return self._np
    def set_np(self,v):
        self._np = int(v)
    np = property(get_np, set_np)
    def get_state(self):
        return self._state
    def set_state(self,v):
        # Only values from the module-level job_state list are legal.
        assert v in job_state
        self._state = v
    state = property(get_state, set_state)
def jidparse(jid_str) :
    # Stub: parse a scheduler job-id string into components (unimplemented).
    pass
def jidunparse(tuple) :
    # Stub: inverse of jidparse (unimplemented).  NOTE(review): the
    # parameter shadows the builtin name 'tuple'.
    pass
if __name__ == '__main__' :
    # Ad-hoc smoke test of Host equality.  Each comparison resolves both
    # names through socket.getfqdn, so these lines hit the resolver.
    h1 = Host(name = 'compute-0-0')
    h2 = Host(name = 'compute-0-1')
    h3 = Host(name = 'compute-0-x')
    #Queue(name = 'test')
    #j = JobInfo()
    print 'compute-0-0 == compute-0-1 : ', h1 == h2
    print 'compute-0-1 == compute-0-0 : ', h2 == h1
    print 'compute-0-0 == compute-0-0 : ', h1 == h1
    print 'compute-0-0 == compute-0-0 (string) : ', h1 == 'compute-0-0'
    print 'compute-0-x == compute-0-0 (string) : ', h3 == 'compute-0-0'
|
"""Provides utilities to preprocess images in CIFAR-10.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
_PADDING = 4
slim = tf.contrib.slim
def preprocess_for_train(image,
                         output_height,
                         output_width,
                         padding=_PADDING):
  """Preprocesses the given image for training.

  Pads, randomly crops to (output_height, output_width), randomly flips
  horizontally, randomly distorts brightness/contrast, then standardizes.

  Args:
    image: A `Tensor` representing an image of arbitrary size.
    output_height: The height of the image after preprocessing.
    output_width: The width of the image after preprocessing.
    padding: The amount of padding before and after each dimension of the image.

  Returns:
    A preprocessed image.
  """
  tf.summary.image('image', tf.expand_dims(image, 0))
  # Transform the image to floats.
  image = tf.to_float(image)
  if padding > 0:
    image = tf.pad(image, [[padding, padding], [padding, padding], [0, 0]])
  # Randomly crop a [height, width] section of the image.  The crop size was
  # previously hard-coded to [32, 32, 3], silently ignoring the
  # output_height/output_width arguments; use them instead (identical result
  # for the standard 32x32 CIFAR-10 call).
  distorted_image = tf.random_crop(image,
                                   [output_height, output_width, 3])
  # Randomly flip the image horizontally.
  distorted_image = tf.image.random_flip_left_right(distorted_image)
  tf.summary.image('distorted_image', tf.expand_dims(distorted_image, 0))
  # Because these operations are not commutative, consider randomizing
  # the order their operation.
  distorted_image = tf.image.random_brightness(distorted_image,
                                               max_delta=63)
  distorted_image = tf.image.random_contrast(distorted_image,
                                             lower=0.2, upper=1.8)
  # Subtract off the mean and divide by the variance of the pixels.
  return tf.image.per_image_standardization(distorted_image)
def preprocess_for_eval(image, output_height, output_width):
  """Preprocesses the given image for evaluation.

  Args:
    image: A `Tensor` representing an image of arbitrary size.
    output_height: The height of the image after preprocessing.
    output_width: The width of the image after preprocessing.

  Returns:
    A preprocessed image.
  """
  tf.summary.image('image', tf.expand_dims(image, 0))
  # Transform the image to floats.
  image = tf.to_float(image)
  # Resize and crop if needed.  resize_image_with_crop_or_pad takes
  # (image, target_height, target_width); the original passed width first,
  # which silently swapped the output dimensions for non-square sizes.
  resized_image = tf.image.resize_image_with_crop_or_pad(image,
                                                         output_height,
                                                         output_width)
  tf.summary.image('resized_image', tf.expand_dims(resized_image, 0))
  # Subtract off the mean and divide by the variance of the pixels.
  return tf.image.per_image_standardization(resized_image)
def preprocess_image(image, output_height, output_width, is_training=False):
  """Preprocesses the given image.

  Dispatches to the training or evaluation preprocessing pipeline.

  Args:
    image: A `Tensor` representing an image of arbitrary size.
    output_height: The height of the image after preprocessing.
    output_width: The width of the image after preprocessing.
    is_training: `True` if we're preprocessing the image for training and
      `False` otherwise.

  Returns:
    A preprocessed image.
  """
  preprocess_fn = preprocess_for_train if is_training else preprocess_for_eval
  return preprocess_fn(image, output_height, output_width)
|
"""Provide various handy Python functions.
Running this script directly will execute the doctests.
Functions:
int2bin(i, n) -- Convert integer to binary string.
bin2int(bin_string) -- Convert binary string to integer.
reverse(input_string) -- Reverse a string.
transpose(matrix) -- Transpose a list of lists.
polygon_area(points_list) -- Calculate the area of an arbitrary polygon.
timestamp() -- Return string containing current time stamp.
pt2str(point) -- Return prettier string version of point tuple.
gcf(a, b) -- Return the greatest common factor of two numbers.
lcm(a, b) -- Return the least common multiple of two numbers.
permutations(input_list) -- Generate all permutations of a list of items.
reduce_fraction(fraction) -- Reduce fraction (num, denom) to simplest form.
quantile(l, p) -- Return p quantile of list l. E.g. p=0.25 for q1.
trim(l) -- Discard values in list more than 1.5*IQR outside IQR.
nice_units(value) -- Return value converted to human readable units.
uniquify(seq) -- Return sequence with duplicate items in sequence seq removed.
reverse_dict(d) -- Return the dictionary with the items as keys and vice-versa.
lsb(x, n) -- Return the n least significant bits of x.
gray_encode(i) -- Gray encode the given integer.
random_vec(bits, max_value=None) -- Return a random binary vector.
binary_range(bits) -- Return list of all possible binary numbers width=bits.
float_range([start], stop, [step]) -- Return range of floats.
find_common_fixes(s1, s2) -- Find common (prefix, suffix) of two strings.
is_rotated(seq1, seq2) -- Return true if the list is a rotation of other list.
getmodule(obj) -- Return the module that contains the object definition of obj.
(use inspect.getmodule instead, though)
get_args(argv) -- Store command-line args in a dictionary.
This module requires Python >= 2.2
"""
# Module metadata.  The $Date$/$Revision$ strings are RCS/CVS keywords that
# were expanded by the original revision-control system.
__author__ = 'Tim Wegener <twegener@radlogic.com.au>'
__date__ = '$Date: 2007/03/27 03:15:06 $'
__version__ = '$Revision: 0.45 $'
__credits__ = """
David Chandler, for polygon area algorithm.
(http://www.davidchandler.com/AreaOfAGeneralPolygon.pdf)
"""
import re
import sys
import time
import random
try:
    # Compatibility shim for ancient interpreters (pre-2.2.1) where the
    # built-in constants True/False did not exist yet.  No-op on any
    # modern Python.
    True, False
except NameError:
    True, False = (1==1, 0==1)
def int2bin(i, n):
    """Convert decimal integer i to n-bit binary number (string).

    Arguments:
    i -- non-negative integer to convert
    n -- width of the result in bits

    Raises ValueError if i is negative or does not fit in n bits.

    >>> int2bin(0, 8)
    '00000000'
    >>> int2bin(123, 8)
    '01111011'
    >>> int2bin(15, 2)
    Traceback (most recent call last):
    ValueError: Value too large for given number of bits.
    """
    # The original implementation went via hex() and a hex-digit lookup
    # table, which raised an obscure KeyError for negative values;
    # format(i, 'b') converts arbitrary-precision ints directly.
    if i < 0:
        raise ValueError("Value must be non-negative.")
    result = format(i, 'b')
    # Raise an error if the value is changed by the truncation.
    if '1' in result[:-n]:
        raise ValueError("Value too large for given number of bits.")
    result = result[-n:]
    # Zero-pad if length shorter than the requested width.
    return '0' * (n - len(result)) + result
def bin2int(bin_string):
    """Convert binary number string to decimal integer.

    Note: Python > v2 has int(bin_string, 2)

    >>> bin2int('1111')
    15
    >>> bin2int('0101')
    5
    """
    # int() with an explicit base does the validation and conversion in one
    # step; the old hand-rolled bit loop is long gone.
    return int(bin_string, 2)
def reverse(input_string):
    """Reverse a string. Useful for strings of binary numbers.

    >>> reverse('abc')
    'cba'
    """
    # A negative-stride slice reverses without the list round-trip.
    return input_string[::-1]
def transpose(matrix):
    """Transpose a list of lists.

    >>> transpose([['a', 'b', 'c'], ['d', 'e', 'f'], ['g', 'h', 'i']])
    [['a', 'd', 'g'], ['b', 'e', 'h'], ['c', 'f', 'i']]
    >>> transpose([['a', 'b', 'c'], ['d', 'e', 'f']])
    [['a', 'd'], ['b', 'e'], ['c', 'f']]
    >>> transpose([['a', 'b'], ['d', 'e'], ['g', 'h']])
    [['a', 'd', 'g'], ['b', 'e', 'h']]
    """
    # A list comprehension returns a real list of lists on both Python 2
    # and 3.  The previous map(list, zip(*matrix)) returned a lazy map
    # iterator under Python 3, breaking callers (and the doctests above)
    # that expect a list.
    return [list(row) for row in zip(*matrix)]
def polygon_area(points_list, precision=100):
    """Calculate area of an arbitrary polygon using an algorithm from the web.

    Return the area of the polygon as a positive float.

    Arguments:
    points_list -- list of point tuples [(x0, y0), (x1, y1), (x2, y2), ...]
                   (Unclosed polygons will be closed automatically.)
    precision -- Internal arithmetic precision (integer arithmetic).

    The input list is left unmodified.

    >>> polygon_area([(0, 0), (0, 1), (1, 1), (1, 2), (2, 2), (2, 0), (0, 0)])
    3.0

    Credits:
    Area of a General Polygon by David Chandler
    http://www.davidchandler.com/AreaOfAGeneralPolygon.pdf
    """
    # Scale up co-ordinates and convert them to integers.  Work on a local
    # copy: the original version overwrote the caller's points_list entries
    # in place, a surprising side effect.
    points = [(int(x * precision), int(y * precision)) for (x, y) in points_list]
    # Close polygon if not closed.  (Guard also avoids an IndexError on an
    # empty input, for which the area is 0.)
    if points and points[-1] != points[0]:
        points.append(points[0])
    # Shoelace (cross-product) accumulation.
    area = 0
    for i in range(len(points) - 1):
        (x_i, y_i) = points[i]
        (x_next, y_next) = points[i + 1]
        area += (x_next * y_i) - (y_next * x_i)
    area = abs(area / 2)
    # Unscale area.
    return float(area) / (precision ** 2)
def timestamp():
    """Return string containing current time stamp.

    Note: In Python 2 onwards can use time.asctime() with no arguments.
    """
    # asctime() with no argument formats the current local time; passing
    # localtime() explicitly is the documented equivalent.
    return time.asctime(time.localtime())
def pt2str(point):
    """Return prettier string version of point tuple.

    >>> pt2str((1.8, 1.9))
    '(1.8, 1.9)'
    """
    # str.format applies str() to each field, matching the old
    # "(%s, %s)" formatting exactly.
    return '({0}, {1})'.format(point[0], point[1])
def gcf(a, b, epsilon=1e-16):
    """Return the greatest common factor of a and b, using Euclidean algorithm.

    Arguments:
    a, b -- two numbers
            If both numbers are integers return an integer result,
            otherwise return a float result.
    epsilon -- floats less than this magnitude are considered to be zero
               (default: 1e-16)

    Examples:

    >>> gcf(12, 34)
    2
    >>> gcf(13.5, 4)
    0.5
    >>> gcf(-2, 4)
    2
    >>> gcf(5, 0)
    5

    By (a convenient) definition:
    >>> gcf(0, 0)
    0
    """
    # Classic Euclidean algorithm; the epsilon cut-off lets float rounding
    # noise terminate the loop.
    larger, smaller = max(a, b), min(a, b)
    while smaller and abs(smaller) > epsilon:
        larger, smaller = smaller, larger % smaller
    return abs(larger)
def lcm(a, b, precision=None):
    """Return the least common multiple of a and b, using the gcf function.

    Arguments:
    a, b -- two numbers. If both are integers return an integer result,
            otherwise a return a float result.
    precision -- scaling factor if a and/or b are floats.

    >>> lcm(21, 6)
    42
    >>> lcm(2.5, 3.5)
    17.5
    >>> str(lcm(1.5e-8, 2.5e-8, precision=1e9))
    '7.5e-08'

    By (an arbitrary) definition:
    >>> lcm(0, 0)
    0
    """
    # Note: Dummy precision argument is for backwards compatibility.
    # lcm(a, b) == a * b / gcf(a, b); dividing before multiplying keeps the
    # intermediate values small.
    # (See http://en.wikipedia.org/wiki/Least_common_multiple )
    divisor = gcf(a, b)
    if divisor == 0:
        return 0
    return a * (b / divisor)
def permutations(input_list):
    """Return a list containing all permutations of the input list.

    Note: This is a recursive function.

    >>> perms = permutations(['a', 'b', 'c'])
    >>> perms.sort()
    >>> for perm in perms:
    ...     print(perm)
    ['a', 'b', 'c']
    ['a', 'c', 'b']
    ['b', 'a', 'c']
    ['b', 'c', 'a']
    ['c', 'a', 'b']
    ['c', 'b', 'a']
    """
    # Base case: zero- or one-element lists have exactly one permutation.
    if len(input_list) <= 1:
        return [input_list]
    head = input_list[0]
    results = []
    # Weave the head element into every position of every permutation of the
    # remaining items.  (Recursive call.)
    for tail_perm in permutations(input_list[1:]):
        for position in range(len(input_list)):
            candidate = tail_perm[:]
            candidate.insert(position, head)
            results.append(candidate)
    return results
def reduce_fraction(fraction):
    """Reduce fraction tuple to simplest form.  fraction=(num, denom)

    >>> reduce_fraction((14, 7))
    (2, 1)
    >>> reduce_fraction((-2, 4))
    (-1, 2)
    >>> reduce_fraction((0, 4))
    (0, 1)
    >>> reduce_fraction((4, 0))
    (1, 0)
    """
    (numerator, denominator) = fraction
    common_factor = abs(gcf(numerator, denominator))
    num = numerator / common_factor
    denom = denominator / common_factor
    # Under Python 3, / is true division and would turn integer fractions
    # into floats (e.g. (2.0, 1.0) instead of the documented (2, 1));
    # restore ints when the input was integral.
    if isinstance(numerator, int) and isinstance(denominator, int):
        num, denom = int(num), int(denom)
    return (num, denom)
def quantile(l, p):
    """Return p quantile of list l. E.g. p=0.25 for q1.

    See:
    http://rweb.stat.umn.edu/R/library/base/html/quantile.html
    """
    ordered = sorted(l)
    n = len(ordered)
    # R-style interpolation: rank counts from 1 and may fall between
    # two data points.
    rank = 1 + ((n - 1) * p)
    whole = int(rank)
    frac = rank - whole
    if whole < n:
        # Linear interpolation between the two neighbouring order statistics.
        return (1 - frac) * ordered[whole - 1] + frac * ordered[whole]
    return ordered[whole - 1]
def trim(l):
    """Discard values in list more than 1.5*IQR outside IQR.

    (IQR is inter-quartile-range)

    This function uses rad_util.quantile

    1.5*IQR -- mild outlier
    3*IQR -- extreme outlier

    Returns a sorted copy; the input list is left unmodified.

    See:
    http://wind.cc.whecn.edu/~pwildman/statnew/section_7_-_exploratory_data_analysis.htm
    """
    l_sort = sorted(l)
    # Calculate IQR.  (The original version also computed the median here,
    # but the value was never used, so that dead code has been removed.)
    q1 = quantile(l_sort, 0.25)
    q3 = quantile(l_sort, 0.75)
    iqr = q3 - q1
    fence = iqr * 1.5
    # Keep only values inside [q1 - fence, q3 + fence].
    return [x for x in l_sort if q1 - fence <= x <= q3 + fence]
def nice_units(value, dp=0, sigfigs=None, suffix='', space=' ',
               use_extra_prefixes=False, use_full_name=False, mode='si'):
    """Return value converted to human readable units eg milli, micro, etc.

    Arguments:
    value -- number in base units
    dp -- number of decimal places to display (rounded)
    sigfigs -- number of significant figures to display (rounded)
               This overrides dp if set.
    suffix -- optional unit suffix to append to unit multiplier
    space -- separator between value and unit multiplier (default: ' ')
    use_extra_prefixes -- use hecto, deka, deci and centi as well if set.
                          (default: False)
    use_full_name -- use full name for multiplier symbol,
                     e.g. milli instead of m (default: False)
    mode -- 'si' for SI prefixes, 'bin' for binary multipliers (1024, etc.)
            (Default: 'si')

    SI prefixes from:
    http://physics.nist.gov/cuu/Units/prefixes.html
    (Greek mu changed to u.)
    Binary prefixes based on:
    http://physics.nist.gov/cuu/Units/binary.html

    >>> nice_units(2e-11)
    '20 p'
    >>> nice_units(2e-11, space='')
    '20p'
    """
    si_prefixes = {1e24: ('Y', 'yotta'),
                   1e21: ('Z', 'zetta'),
                   1e18: ('E', 'exa'),
                   1e15: ('P', 'peta'),
                   1e12: ('T', 'tera'),
                   1e9: ('G', 'giga'),
                   1e6: ('M', 'mega'),
                   1e3: ('k', 'kilo'),
                   1e-3: ('m', 'milli'),
                   1e-6: ('u', 'micro'),
                   1e-9: ('n', 'nano'),
                   1e-12: ('p', 'pico'),
                   1e-15: ('f', 'femto'),
                   1e-18: ('a', 'atto'),
                   1e-21: ('z', 'zepto'),
                   1e-24: ('y', 'yocto')}
    if use_extra_prefixes:
        si_prefixes.update({1e2: ('h', 'hecto'),
                            1e1: ('da', 'deka'),
                            1e-1: ('d', 'deci'),
                            1e-2: ('c', 'centi')})
    # 2**30 is 'giga'; the original table mislabelled it 'mega'.
    bin_prefixes = {2**10: ('K', 'kilo'),
                    2**20: ('M', 'mega'),
                    2**30: ('G', 'giga'),
                    2**40: ('T', 'tera'),
                    2**50: ('P', 'peta'),
                    2**60: ('E', 'exa')}
    prefixes = bin_prefixes if mode == 'bin' else si_prefixes
    prefixes[1] = ('', '')  # Unity.
    # Determine appropriate multiplier.  sorted() works on both Python 2
    # and 3; under Python 3 dict.keys() is a view with no .sort() method,
    # which crashed the original code.
    multipliers = sorted(prefixes)
    mult_i = None
    for i in range(len(multipliers) - 1):
        if multipliers[i] <= value < multipliers[i + 1]:
            mult_i = i
            break
    if mult_i is None:
        # Below the smallest or at/above the largest multiplier.
        mult_i = 0 if value < multipliers[0] else len(multipliers) - 1
    mult = multipliers[mult_i]
    # Convert value for this multiplier.
    new_value = value / mult
    # Deal with special case due to rounding: if the displayed value rounds
    # up to exactly the next multiplier, use that multiplier instead.
    if sigfigs is None:
        if mult_i < (len(multipliers) - 1) and \
                round(new_value, dp) == round(multipliers[mult_i + 1] / mult, dp):
            mult = multipliers[mult_i + 1]
            new_value = value / mult
    # Symbol (index 0) or full name (index 1).
    label_type = 1 if use_full_name else 0
    # Round and truncate to appropriate precision.  The '%.*f' form takes
    # the precision as an argument, replacing the original eval() hack.
    if sigfigs is None:
        str_value = '%.*f' % (dp, new_value)
    else:
        str_value = '%.*g' % (sigfigs, new_value)
    return str_value + space + prefixes[mult][label_type] + suffix
def uniquify(seq, preserve_order=False):
    """Return sequence with duplicate items in sequence seq removed.

    The code is based on usenet post by Tim Peters.

    This code is O(N) if the sequence items are hashable, O(N**2) if not.

    If order is not important and the sequence items are hashable then
    list(set(seq)) is readable and efficient.

    Arguments:
    seq -- sequence
    preserve_order -- if not set the order will be arbitrary
                      Using this option will incur a speed penalty.
                      (default: False)

    Example showing order preservation:

    >>> uniquify(['a', 'aa', 'b', 'b', 'ccc', 'ccc', 'd'], preserve_order=True)
    ['a', 'aa', 'b', 'ccc', 'd']

    Example using a sequence of un-hashable items:

    >>> uniquify([['z'], ['x'], ['y'], ['z']], preserve_order=True)
    [['z'], ['x'], ['y']]
    """
    try:
        # Fast path: key a dict by item (requires hashable items).
        d = {}
        if preserve_order:
            # This is based on Dave Kirby's method (f8) noted in the post:
            # http://www.peterbe.com/plog/uniqifiers-benchmark
            return [x for x in seq if (x not in d) and not d.__setitem__(x, 0)]
        for x in seq:
            d[x] = 0
        # Return a real list: under Python 3, dict.keys() is a view object
        # with no .sort() method, which broke callers expecting a list.
        return list(d)
    except TypeError:
        # Have an unhashable object, so use the slow quadratic scan.
        result = []
        for x in seq:
            if x not in result:
                result.append(x)
        return result
unique = uniquify
def reverse_dict(d):
    """Reverse a dictionary so the items become the keys and vice-versa.

    Note: The results will be arbitrary if the items are not unique.

    >>> d = reverse_dict({'a': 1, 'b': 2})
    >>> sorted(d.items())
    [(1, 'a'), (2, 'b')]
    """
    # Same later-entry-wins semantics as the original loop.
    return dict((value, key) for key, value in d.items())
def lsb(x, n):
    """Return the n least significant bits of x.

    >>> lsb(13, 3)
    5
    """
    # Mask off everything above the low n bits.
    mask = (2 ** n) - 1
    return x & mask
def gray_encode(i):
    """Gray encode the given integer."""
    # XOR with the half-shifted value yields the reflected binary code.
    shifted = i >> 1
    return i ^ shifted
def random_vec(bits, max_value=None):
    """Generate a random binary vector of length bits and given max value."""
    # Build the vector 10 random bits at a time.
    chunks = []
    for _ in range(int(bits / 10) + 1):
        chunk_value = int((2 ** 10) * random.random())
        chunks.append(int2bin(chunk_value, 10))
    vector = ''.join(chunks)
    if max_value and (max_value < 2 ** bits - 1):
        # Scale the value into [0, max_value].  Multiply before
        # floor-dividing: the original used true division, which truncated
        # to 0 under Python 2 integer division and fed a float to int2bin
        # under Python 3.
        scaled = int(vector, 2) * max_value // (2 ** bits - 1)
        vector = int2bin(scaled, bits)
    return vector[0:bits]
def binary_range(bits):
    """Return a list of all possible binary numbers in order with width=bits.

    It would be nice to extend it to match the
    functionality of python's range() built-in function.
    """
    # The original incrementer loop produced nothing for a non-positive
    # width; preserve that.
    if bits < 1:
        return []
    # Counting from 0 to 2**bits - 1, zero-padded to the requested width,
    # yields exactly the same ordered list the manual ripple-carry
    # incrementer produced.
    return ['{0:0{1}b}'.format(value, bits) for value in range(2 ** bits)]
def float_range(start, stop=None, step=None):
    """Return a list containing an arithmetic progression of floats.

    Return a list of floats between 0.0 (or start) and stop with an
    increment of step.

    This is similar in functionality to python's range() built-in function
    but can accept float increments.

    As with range(), stop is omitted from the list.
    """
    # Single-argument form: float_range(stop).
    if stop is None:
        start, stop = 0.0, float(start)
    if step is None:
        step = 1.0
    values = []
    current = float(start)
    while current < stop:
        values.append(current)
        current += step
    return values
def find_common_fixes(s1, s2):
    """Find common (prefix, suffix) of two strings.

    >>> find_common_fixes('abc', 'def')
    ('', '')
    >>> find_common_fixes('abcelephantdef', 'abccowdef')
    ('abc', 'def')
    >>> find_common_fixes('abcelephantdef', 'abccow')
    ('abc', '')
    >>> find_common_fixes('elephantdef', 'abccowdef')
    ('', 'def')
    """
    overlap = min(len(s1), len(s2))
    # Common prefix: scan forward until the first mismatch.
    n_prefix = 0
    while n_prefix < overlap and s1[n_prefix] == s2[n_prefix]:
        n_prefix += 1
    # Common suffix: scan backward from the ends until the first mismatch.
    n_suffix = 0
    while n_suffix < overlap and s1[-(n_suffix + 1)] == s2[-(n_suffix + 1)]:
        n_suffix += 1
    prefix = s1[:n_prefix]
    suffix = s1[len(s1) - n_suffix:]
    return (prefix, suffix)
def is_rotated(seq1, seq2):
    """Return true if the first sequence is a rotation of the second sequence.

    >>> int(is_rotated(['A', 'B', 'C', 'D'], ['C', 'D', 'A', 'B']))
    1
    >>> int(is_rotated(['A', 'B', 'C', 'D'], ['C', 'D', 'B', 'A']))
    0
    >>> int(is_rotated(['A', 'B', 'C', 'A'], ['A', 'A', 'B', 'C']))
    1
    """
    # Rotations must have equal length.
    if len(seq1) != len(seq2):
        return False
    # seq2 is a rotation of seq1 iff it appears as a window of seq1 + seq1.
    # Only windows starting where seq2's head item occurs need checking.
    doubled = seq1 + seq1
    head_item = seq2[0]
    for offset in range(len(seq1)):
        if seq1[offset] == head_item and doubled[offset:offset + len(seq1)] == seq2:
            return True
    return False
def getmodule(obj):
    """Return the module that contains the object definition of obj.

    Note: Use inspect.getmodule instead.

    Arguments:
    obj -- python obj, generally a class or a function

    >>> getmodule(random.choice) is random
    True
    >>> getmodule(random.Random) is random
    True

    Discussion:
    This approach is slightly hackish, and won't work in various situations
    (e.g. classes that define no functions of their own).  However, this was
    the approach recommended by GvR, so it's as good as you'll get.
    """
    def _globals_of(candidate):
        # Functions carry their defining module's globals as __globals__
        # (Python 3) or func_globals (Python 2); the original checked only
        # the Python 2 name and so always failed under Python 3.
        return getattr(candidate, '__globals__',
                       getattr(candidate, 'func_globals', None))

    func_globals = _globals_of(obj)
    if func_globals is None:
        # Handle classes: use any function defined in the class body.
        for item in obj.__dict__.values():
            func_globals = _globals_of(item)
            if func_globals is not None:
                break
    if func_globals is None:
        raise ValueError("No functions attached to object: %r" % obj)
    module_name = func_globals['__name__']
    # Get module.
    return sys.modules[module_name]
def round_grid(value, grid, mode=0):
    """Round off the given value to the given grid size.

    Arguments:
    value -- value to be rounded
    grid -- result must be a multiple of this
    mode -- 0 nearest, 1 up, -1 down

    Examples:

    >>> round_grid(7.5, 5)
    10
    >>> round_grid(7.5, 5, mode=-1)
    5
    >>> round_grid(7.3, 5, mode=1)
    10
    >>> round_grid(7.3, 5.0, mode=1)
    10.0
    """
    off_grid = value % grid
    # Default to "no bump": the original left add_one unbound when the value
    # was already on the grid with mode=1 (or mode=-1), raising
    # UnboundLocalError on e.g. round_grid(10, 5, mode=1).
    add_one = 0
    if mode == 0:
        add_one = int(off_grid >= (grid / 2.0))
    elif mode == 1 and off_grid:
        add_one = 1
    return (int(value / grid) + add_one) * grid
def get_args(argv):
    """Store command-line args in a dictionary.

    -, -- prefixes are removed
    Items not prefixed with - or -- are stored as a list, indexed by 'args'

    For options that take a value use --option=value

    Consider using optparse or getopt (in Python standard library) instead.
    """
    options = {}
    positional = []
    for token in argv:
        if not token.startswith('-'):
            positional.append(token)
            continue
        # Strip any number of leading dashes, then look for a single
        # '=value' part; anything else maps the option to None.
        pieces = re.sub(r'^-+', '', token).split('=')
        if len(pieces) == 2:
            options[pieces[0]] = pieces[1]
        else:
            options[pieces[0]] = None
    options['args'] = positional
    return options
if __name__ == '__main__':
    # Running this script directly executes the module's doctests.
    import doctest
    doctest.testmod(sys.modules['__main__'])
|
"""
Copyright 2017 Ryan Wick (rrwick@gmail.com)
https://github.com/rrwick/Unicycler
This module contains functions relating to BLAST, which Unicycler uses to rotate completed circular
replicons to a standard starting point.
This file is part of Unicycler. Unicycler is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by the Free Software Foundation,
either version 3 of the License, or (at your option) any later version. Unicycler is distributed in
the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
details. You should have received a copy of the GNU General Public License along with Unicycler. If
not, see <http://www.gnu.org/licenses/>.
"""
import os
import subprocess
from .misc import load_fasta
from . import log
class CannotFindStart(Exception):
    """Raised when no start gene meeting the identity/coverage thresholds
    can be located in the replicon (or when BLAST tooling fails)."""
    pass
def find_start_gene(sequence, start_genes_fasta, identity_threshold, coverage_threshold, blast_dir,
                    makeblastdb_path, tblastn_path):
    """
    This function uses tblastn to look for start genes in the sequence. It returns the first gene
    (using the order in the file) which meets the identity and coverage thresholds, as well as
    the position of that gene (including which strand it is on).
    This function assumes that the sequence is circular with no overlap.

    Raises CannotFindStart if there are no query genes, makeblastdb fails,
    or no hit passes the thresholds.
    """
    # Prepare the replicon sequence. In order to get a solid, single BLAST hit in cases where the
    # gene overlaps from the end to the start, we have to duplicate some of the replicon sequence
    # for the BLAST database.
    seq_len = len(sequence)
    start_genes_fasta = os.path.abspath(start_genes_fasta)
    queries = load_fasta(start_genes_fasta)
    if not queries:
        raise CannotFindStart
    longest_query = max(len(x[1]) for x in queries)
    longest_query *= 3  # amino acids to nucleotides
    dup_length = min(seq_len, longest_query)
    sequence = sequence + sequence[:dup_length]

    # BLAST has serious issues with paths that contain spaces. This page explains some of it:
    # https://www.ncbi.nlm.nih.gov/books/NBK279669/
    # But I couldn't make it all work for makeblastdb (spaces made it require -out, and it never
    # accepted spaces in the -out path, no matter how I used quotes). So we will just move into the
    # temporary directory to run the BLAST commands.
    starting_dir = os.getcwd()
    os.chdir(blast_dir)
    # try/finally guarantees the working directory is restored even when an
    # unexpected exception escapes; the original only restored it on the
    # explicitly-handled paths.
    try:
        # Create a FASTA file of the replicon sequence.
        replicon_fasta_filename = 'replicon.fasta'
        with open(replicon_fasta_filename, 'w') as replicon_fasta:
            replicon_fasta.write('>replicon\n')
            replicon_fasta.write(sequence)
            replicon_fasta.write('\n')

        # Build the BLAST database.
        command = [makeblastdb_path, '-dbtype', 'nucl', '-in', replicon_fasta_filename]
        log.log(' ' + ' '.join(command), 2)
        process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        _, err = process.communicate()
        if err:
            log.log('\nmakeblastdb encountered an error:\n' + err.decode())
            raise CannotFindStart

        # Run the tblastn search.  (communicate() already waits for process
        # exit, so the original's extra process.wait() was redundant.)
        command = [tblastn_path, '-db', replicon_fasta_filename, '-query', start_genes_fasta,
                   '-outfmt', '6 qseqid sstart send pident qlen qseq qstart bitscore',
                   '-num_threads', '1']
        log.log(' ' + ' '.join(command), 2)
        process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        blast_out, blast_err = process.communicate()
        if blast_err:
            log.log('\nBLAST encountered an error:\n' + blast_err.decode())

        # Find the best hit in the results: highest bitscore among hits that
        # pass identity/coverage and start at the first query residue.
        best_hit, best_bitscore = None, 0
        for line in blast_out.decode().splitlines():
            hit = BlastHit(line, seq_len)
            if hit.pident >= identity_threshold and hit.query_cov >= coverage_threshold and \
                    hit.qstart == 0 and hit.bitscore > best_bitscore:
                best_hit = hit
                best_bitscore = hit.bitscore
    finally:
        os.chdir(starting_dir)
    if best_bitscore:
        return best_hit
    raise CannotFindStart
class BlastHit(object):
    """One tblastn hit parsed from a tab-delimited BLAST outfmt-6 line
    (qseqid sstart send pident qlen qseq qstart bitscore)."""

    def __init__(self, blast_line, seq_len):
        # Defaults cover malformed/short lines: a zero-bitscore,
        # zero-coverage hit that callers will never select.
        self.qseqid = ''
        self.pident, self.qstart, self.bitscore, self.query_cov, self.start_pos = 0, 0, 0, 0, 0
        self.flip = False

        fields = blast_line.strip().split('\t')
        if len(fields) > 7:
            self.qseqid = fields[0]
            self.pident = float(fields[3])
            self.qstart = int(fields[6]) - 1
            self.bitscore = float(fields[7])
            qlen = float(fields[4])
            qseq = fields[5]
            self.query_cov = 100.0 * len(qseq) / qlen
            # BLAST positions are 1-based; convert start to 0-based.
            sstart = int(fields[1]) - 1
            send = int(fields[2])
            if sstart <= send:
                self.start_pos, self.flip = sstart, False
            else:
                # Reverse-strand hit: subject start is past subject end.
                self.start_pos, self.flip = sstart + 1, True
            # The subject was duplicated to catch wrap-around hits, so map
            # positions back onto the original (unduplicated) sequence.
            if self.start_pos >= seq_len:
                self.start_pos -= seq_len

    def __repr__(self):
        strand = 'reverse' if self.flip else 'forward'
        return ('BLAST hit: query=' + self.qseqid +
                ', subject start=' + str(self.start_pos) +
                ', strand=' + strand +
                ', ID=' + str(self.pident) +
                ', cov=' + str(self.query_cov) +
                ', bitscore=' + str(self.bitscore))
|
import sys
import os
import re
def human_size_to_byte(number):
    """
    Convert number of these units to bytes, ignore case:

    b : 512
    kB : 1000
    K : 1024
    mB : 1000*1000
    m : 1024*1024
    MB : 1000*1000
    M : 1024*1024
    GB : 1000*1000*1000
    G : 1024*1024*1024
    TB : 1000*1000*1000*1000
    T : 1024*1024*1024*1024
    PB : 1000*1000*1000*1000*1000
    P : 1024*1024*1024*1024*1024
    EB : 1000*1000*1000*1000*1000*1000
    E : 1024*1024*1024*1024*1024*1024
    ZB : 1000*1000*1000*1000*1000*1000*1000
    Z : 1024*1024*1024*1024*1024*1024*1024
    YB : 1000*1000*1000*1000*1000*1000*1000*1000
    Y : 1024*1024*1024*1024*1024*1024*1024*1024

    number is of one of these forms:
    123, 123b, 123M, 1G
    """
    # Build the unit table: '<letter>b' is the decimal (SI) multiplier,
    # a bare letter is the binary one, and lone 'b' means 512-byte blocks.
    mapping = {'b': 512}
    for power, letter in enumerate('kmgtpezy', start=1):
        mapping[letter + 'b'] = 1000 ** power
        mapping[letter] = 1024 ** power
    suffix = re.sub('^[0-9]+', '', number)
    if not suffix:
        return int(number)
    unit = suffix.lower()
    assert unit in mapping.keys(), "wrong unit %s " % unit
    amount = int(number[:-len(suffix)])
    return mapping[unit] * amount
def correct_offset(file):
    """Due to Python cache issue, the real file offset of the
    underlying file descriptor may differ, this function can correct
    it.
    """
    # Remember the logical position, seek to EOF, then seek back to it.
    # NOTE(review): the EOF round-trip presumably forces the buffered layer
    # to drop its read-ahead so the OS-level descriptor offset matches the
    # Python-level offset — confirm against io.BufferedReader semantics.
    cur = file.seek(0, 1)
    file.seek(0, 2)
    file.seek(cur)
def open_file(file):
    """Open *file* for binary reading; the name '-' selects standard input
    (re-opened in binary mode via its file descriptor)."""
    if file != '-':
        return open(file, 'rb')
    return os.fdopen(sys.stdin.fileno(), 'rb')
class Locator:
    """Search from the end of the file backward, locate the starting
    offset of the specified amount, measured by line, or by byte.
    """
    def __init__(self, ifile, mode, amount, bs=8192):
        """mode can be 'lines' or 'bytes'"""
        assert ifile.seekable(), "input file is not seekable"
        # Remember the caller's read position; run() restores it when done.
        self.orig_pos = ifile.seek(0, 1)
        self.ifile = ifile
        # 'lines' or 'bytes' -- selects which find_* helper find() uses.
        self.mode = mode
        # Number of trailing lines/bytes whose start offset we want.
        self.amount = amount
        # Block size for the backward scan.
        self.bs = bs
    def find_line(self, ifile, chunk, amount):
        """ Find if data chunk contains 'amount' number of lines.
        Return value: (stat, pos, remaining-amount). If stat is True,
        pos is the result, otherwise pos is not used, remaining-amount
        is for the next run.
        """
        count = chunk.count(b'\n')
        if count <= amount:
            # Not enough newlines in this chunk; deduct what was seen and
            # keep scanning the previous block.
            amount -= count
            return False, 0, amount
        else: # found
            # Skip the first (count - amount) newlines: the wanted offset is
            # just after the newline preceding the last 'amount' lines.
            pos = -1
            for i in range(count - amount):
                pos = chunk.index(b'\n', pos+1)
            pos += 1
            # Translate the in-chunk index to an absolute file offset by
            # seeking back from the chunk's end.
            diff = len(chunk) - pos
            pos = ifile.seek(-diff, 1)
            return True, pos, 0
    def find_byte(self, ifile, chunk, amount):
        """ Find if data chunk contains 'amount' number of bytes.
        Return value: (stat, pos, remaining-amount). If stat is True,
        pos is the result, otherwise pos is not used, remaining-amount
        is for the next run.
        """
        length = len(chunk)
        if length < amount:
            # Chunk is smaller than what is still needed; keep scanning.
            amount -= length
            return False, 0, amount
        else: # found
            # The target offset is 'amount' bytes before the chunk's end.
            pos = ifile.seek(-amount, 1)
            return True, pos, 0
    def find(self, ifile, offset, size, amount):
        """Read 'size' bytes starting from offset to find.
        Return value: (stat, pos, remaining-amount). If stat is True,
        pos is the result, otherwise pos is not used, remaining-amount
        is for the next run.
        """
        try:
            pos = ifile.seek(offset)
        except OSError:
            assert False, "unkown file seeking failure"
        chunk = ifile.read(size)
        # Dispatch on the mode chosen at construction time.
        if self.mode == 'lines':
            return self.find_line(ifile, chunk, amount)
        else:
            return self.find_byte(ifile, chunk, amount)
    def run(self):
        """Find the offset of the last 'amount' lines"""
        ifile = self.ifile
        amount = self.amount
        orig_pos = self.orig_pos
        end = ifile.seek(0, 2)  # jump to the end
        # nothing to process, return the original position
        total = end - orig_pos
        if total <= amount:
            correct_offset(ifile)
            return orig_pos
        bs = self.bs
        # Scan backward block by block: first the trailing partial block,
        # then whole bs-sized blocks until found or orig_pos is reached.
        remaining = total % bs
        offset = end - remaining
        stat, pos, amount = self.find(ifile, offset, remaining, amount)
        while not stat and offset != orig_pos:
            offset -= bs
            stat, pos, amount = self.find(ifile, offset, bs, amount)
        # Leave the file positioned where the caller had it, and re-sync
        # the underlying descriptor offset.
        # NOTE(review): if the region holds fewer than 'amount' newlines the
        # loop exits with stat False and returns the last probe's pos (0),
        # i.e. the start of the scanned range -- confirm this is intended.
        ifile.seek(self.orig_pos)
        correct_offset(ifile)
        return pos
class Buffer:
    """FIFO of (count, data) pairs with a running total of the counts.

    Used to withhold the final `min` units (lines or bytes) from the
    output stream until the end of input is known.
    """

    def __init__(self, amount):
        self.min = amount   # number of trailing units to hold back
        self.total = 0      # sum of the counts currently queued
        self.data = []      # queue of (count, data) pairs

    def push(self, pair):
        """Append a (count, data) pair at the tail."""
        self.total += pair[0]
        self.data.append(pair)

    def pop(self):
        """Remove and return the (count, data) pair at the head."""
        head = self.data.pop(0)
        self.total -= head[0]
        return head

    def cut(self):
        """Pop as many pairs off the head of the self.data as
        self.is_ready() is True, return a combined result.
        """
        combined_count = 0
        chunks = []
        while self.is_ready():
            count, data = self.pop()
            combined_count += count
            chunks.append(data)
        return combined_count, b''.join(chunks)

    def is_satisfied(self):
        """The minimum amount is satisfied"""
        return self.total >= self.min

    def is_ready(self):
        """The buffer is ready to pop"""
        # Ready when the head pair can leave and still keep >= min queued.
        return self.total - self.data[0][0] >= self.min
class HeadWorkerSL:
    """Seekable, line mode"""

    def __init__(self, ifile, ofile, amount, bs=None):
        self.ifile = ifile
        self.ofile = ofile
        self.amount = amount     # lines still to emit
        self.bs = bs or 8192     # read block size

    def read(self):
        """Fetch the next block from the input."""
        return self.ifile.read(self.bs)

    def transform(self, data):
        """Measure a block: the number of lines it contains."""
        return data.count(b'\n')

    def is_last(self, count):
        """True when this block covers all remaining lines."""
        return count >= self.amount

    def action(self, data, count):
        """Emit a whole block and decrease the outstanding amount."""
        self.ofile.write(data)
        self.amount -= count

    def handle_last(self, data):
        """Emit only the needed head of the final block, then push the
        over-read part back to the input (best effort on pipes)."""
        end = -1
        for _ in range(self.amount):
            end = data.index(b'\n', end + 1)
        end += 1
        self.ofile.write(data[:end])
        surplus = len(data) - end
        try:
            self.ifile.seek(-surplus, 1)
        except Exception:
            pass

    def run(self):
        """Copy blocks until the requested number of lines is written."""
        while self.amount:
            block = self.read()
            if not block:
                break
            line_count = self.transform(block)
            if self.is_last(line_count):
                self.handle_last(block)
                break
            self.action(block, line_count)
class HeadWorkerSB(HeadWorkerSL):
    """Seekable, byte mode"""

    def transform(self, data):
        # Blocks are measured in bytes instead of lines.
        return len(data)

    def handle_last(self, data):
        # Emit exactly the remaining bytes, then push back the surplus
        # (best effort: the seek may fail on unseekable inputs).
        needed = self.amount
        self.ofile.write(data[:needed])
        surplus = len(data) - needed
        try:
            self.ifile.seek(-surplus, 1)
        except Exception:
            pass
class HeadWorkerTL(HeadWorkerSL):
    """Terminal, line mode"""

    def read(self):
        # Terminals are read one line at a time to stay interactive.
        return self.ifile.readline()

    def action(self, data, count):
        # Each read() yields exactly one line, so decrement by one;
        # flush so output appears immediately.
        self.ofile.write(data)
        self.amount -= 1
        self.ofile.flush()

    def handle_last(self, data):
        # The final line is emitted whole; nothing was over-read.
        self.ofile.write(data)
        self.ofile.flush()
class HeadWorkerTB(HeadWorkerSB):
    """Terminal, byte mode"""
    def read(self):
        # Read line-wise (terminal input arrives a line at a time) while
        # the inherited HeadWorkerSB logic counts in bytes.
        return self.ifile.readline()
class HeadWorkerULIT(HeadWorkerSL):
    """Unseekable, line mode ignore tail"""

    def __init__(self, ifile, ofile, amount, bs=None):
        self.ifile = ifile
        self.ofile = ofile
        self.amount = amount     # trailing lines to withhold from output
        self.bs = bs or 8192

    def read(self):
        return self.ifile.read(self.bs)

    def transform(self, data):
        return data.count(b'\n')

    def fill(self):
        """Fill up the buffer with content from self.ifile"""
        holdback = Buffer(self.amount)
        while True:
            data = self.read()
            if not data:
                break
            holdback.push((self.transform(data), data))
            if holdback.is_satisfied():
                break
        return holdback

    def step(self, buffer):
        """Read and process the self.ifile step by step,
        return False if nothing left in self.ifile.
        """
        data = self.read()
        if not data:
            return False
        buffer.push((self.transform(data), data))
        # Emit whatever can leave the buffer while still withholding the
        # final self.amount lines.
        if buffer.is_ready():
            _, ready_data = buffer.cut()
            self.proc(ready_data)
        return True

    def proc(self, data):
        """Emit one chunk."""
        self.ofile.write(data)
        self.ofile.flush()

    def handle_last(self, buffer):
        """Drain the buffer, trimming off the trailing self.amount lines."""
        while True:
            _, data = buffer.pop()
            if not buffer.is_satisfied():
                # The tail to drop now ends inside this chunk: write all
                # but its last (min - total) lines.
                short = buffer.min - buffer.total
                lines = data.splitlines(keepends=True)
                self.ofile.writelines(lines[:-short])
                break
            self.proc(data)
        self.ofile.flush()

    def run(self):
        buffer = self.fill()
        if buffer.is_satisfied():
            while self.step(buffer):
                pass
            self.handle_last(buffer)
class HeadWorkerTLIT(HeadWorkerULIT):
    """Terminal, line mode ignore tail"""

    def read(self):
        # Line-at-a-time input suits an interactive terminal.
        line = self.ifile.readline()
        return line
class HeadWorkerUBIT(HeadWorkerULIT):
    """Unseekable, byte mode ignore tail"""

    def transform(self, data):
        """Measure chunks in bytes instead of lines."""
        return len(data)

    def handle_last(self, buffer):
        """Drain the buffer, trimming the final chunk down so the last
        buffer.min bytes stay unprinted."""
        while True:
            _, data = buffer.pop()
            if not buffer.is_satisfied():
                excess = buffer.min - buffer.total
                self.ofile.write(data[:-excess])
                break
            self.ofile.write(data)
        self.ofile.flush()
class HeadWorkerTBIT(HeadWorkerUBIT):
    """Terminal, byte mode ignore tail"""

    def read(self):
        # Line-at-a-time input suits an interactive terminal.
        line = self.ifile.readline()
        return line
class Mixin:
    def copy_to_end(self):
        """Stream the remainder of the input straight to the output."""
        while True:
            block = self.read()
            if not block:
                return
            self.ofile.write(block)
class TailWorkerSLIH(HeadWorkerSL, Mixin):
    """Seekable, line mode, ignore head"""

    def __init__(self, ifile, ofile, amount, bs=None):
        # The stored skip count is one less than *amount* (tail-style
        # "+K" addressing starts output AT line K).
        super(TailWorkerSLIH, self).__init__(ifile, ofile, amount, bs)
        if amount > 0:
            self.amount -= 1

    def action(self, data, count):
        """Skip an entire chunk: only consume its line count."""
        self.amount -= count

    def handle_last(self, data):
        """Skip the first self.amount lines of this chunk, emit the rest,
        then stream everything remaining in the input."""
        cut = -1
        for _ in range(self.amount):
            cut = data.index(b'\n', cut + 1)
        cut += 1
        self.ofile.write(data[cut:])
        self.copy_to_end()
class TailWorkerSBIH(TailWorkerSLIH):
    """Seekable, byte mode, ignore head"""

    def transform(self, data):
        """Chunks are measured in bytes."""
        return len(data)

    def handle_last(self, data):
        """Drop the first self.amount bytes, emit the remainder, then
        copy the rest of the input verbatim."""
        self.ofile.write(data[self.amount:])
        self.copy_to_end()
class TailWorkerSB(TailWorkerSLIH):
    """Plain pass-through: copy the whole input, no skip amount."""

    def __init__(self, ifile, ofile, bs=None):
        self.ifile = ifile
        self.ofile = ofile
        self.bs = bs or 8192

    def run(self):
        """Copy everything from input to output."""
        self.copy_to_end()
class TailWorkerULIH(HeadWorkerULIT, Mixin):
    """Unseekable, line mode ignore head"""

    def proc(self, data):
        """Discard data while still inside the skipped head."""

    def handle_last(self, buffer):
        """Emit the tail: split the boundary chunk at the skip point,
        then flush every remaining buffered chunk untouched."""
        while True:
            _, data = buffer.pop()
            if not buffer.is_satisfied():
                boundary = buffer.min - buffer.total
                self.split_and_proc(data, boundary)
                for _, rest in buffer.data:
                    self.ofile.write(rest)
                break

    def split_and_proc(self, data, diff):
        """Write only the last *diff* lines of *data*."""
        lines = data.splitlines(keepends=True)
        self.ofile.writelines(lines[-diff:])
class TailWorkerUBIH(TailWorkerULIH):
    """Unseekable, byte mode ignore head"""

    def read(self):
        # Block reads: the input is a stream, not a terminal.
        return self.ifile.read(self.bs)

    def transform(self, data):
        """Chunks are measured in bytes."""
        return len(data)

    def split_and_proc(self, data, diff):
        """Write only the last *diff* bytes of *data*."""
        self.ofile.write(data[-diff:])
class TailWorkerTLIH(TailWorkerULIH):
    """Terminal, line mode ignore head"""

    def read(self):
        # Line-at-a-time input suits an interactive terminal.
        line = self.ifile.readline()
        return line
class TailWorkerTBIH(TailWorkerTLIH):
    """Terminal, byte mode ignore head"""

    def transform(self, data):
        """Chunks are measured in bytes."""
        return len(data)

    def split_and_proc(self, data, diff):
        """Write only the last *diff* bytes of *data*."""
        self.ofile.write(data[-diff:])
class TailWorkerTL(TailWorkerSLIH):
    """Terminal, line mode, ignore head"""

    def read(self):
        # Terminals are consumed line by line.
        line = self.ifile.readline()
        return line

    def handle_last(self, data):
        """Discard the current (boundary) line and stream the rest."""
        self.copy_to_end()
class TailWorkerTB(TailWorkerTL):
    """Terminal, byte mode, ignore head"""

    def transform(self, data):
        """Chunks are measured in bytes."""
        return len(data)

    def handle_last(self, data):
        """Emit the part of this chunk past the skip offset, then copy
        the remaining input."""
        self.ofile.write(data[self.amount:])
        self.copy_to_end()
class GrepNameDetermined(Exception):
    """Raised to short-circuit a scan once the file name must be printed."""


class GrepStatusDetermined(Exception):
    """Raised to short-circuit all scanning once the exit status is known."""
class GrepWorker:
    """Scan one input stream for a regex pattern and emit matching lines
    to an output stream (the per-file engine of a grep clone).

    The constructor specializes the instance once, up front, by
    rebinding bound methods according to *options*:
      - 'invert' (-v) swaps on_match/on_not_match (unless -l/-c need the
        true sense of matching),
      - 'quiet' (-q) replaces on_match with an exception-raising stub,
      - a tty input switches to line-by-line read/write,
      - color output swaps in the colorizing helper methods.
    This keeps the inner loop in run() free of option checks.

    NOTE(review): relies on the module-level `re` import, which is
    outside this chunk.
    """
    # VT100 color escape sequences (bytes, written straight to ofile)
    c_fname = b'\x1b[35m'  # magenta
    c_sep = b'\x1b[36m'  # cyan
    c_lnum = b'\x1b[32m'  # green
    c_match = b'\x1b[31m\x1b[1m'  # bold red
    c_off = b'\x1b[0m'  # turn off color
    sep_line = b'--\n'  # context-group separator (plain)
    c_sep_line = c_sep + b'--' + c_off + b'\n'  # colored separator
    def __init__(self, pattern, options, ifile, ofile, bs=None):
        """pattern: regex source string; options: dict of grep flags
        (must contain 'color' and 'with_filename'); ifile/ofile: binary
        streams; bs: readlines() size hint in bytes."""
        self.pattern = pattern
        self.options = options
        self.ifile = ifile
        self.ofile = ofile
        self.bs = bs or 8192
        self.nr = 0  # number of records (lines) consumed so far
        self.fname = self.make_fname(ifile.name)
        self.status = False  # becomes True once anything matched
        # Invert the sense of matching (-v); -l and -c still need the
        # true sense, so they are excluded here.
        if ('invert' in options and 'file_match' not in options
                and 'count' not in options):
            self.on_match, self.on_not_match = self.on_not_match, self.on_match
        # set on_match method for -q option
        if 'quiet' in options:
            self.on_match = self.quiet_on_match
        # set reader for tty input file
        if ifile.isatty():
            self.read = self.read_tty
            self.write = self.write_tty
        # setup color output ('and' binds tighter than 'or' below)
        color = options['color']
        if color == 'always' or self.ofile.isatty() and color == 'auto':
            self.sep_line = self.c_sep_line
            self.make_fname_str = self.make_color_fname_str
            self.make_lnum_str = self.make_color_lnum_str
            self.make_matcher = self.make_color_matcher
        self.matcher = self.make_matcher(options)
    def insert_line_number(self, lines, num, sep=b':'):
        """Insert line number to the head of each line"""
        num = str(num).encode()
        num_str = self.make_lnum_str(num, sep)
        return (b'%s%s' % (num_str, line) for line in lines)
    def insert_file_name(self, lines, fname, sep=b':'):
        """Insert file name to the head of each line"""
        fname_str = self.make_fname_str(fname, sep)
        return (b'%s%s' % (fname_str, line) for line in lines)
    def make_lnum_str(self, num, sep):
        """Plain line-number prefix; replaced by the color variant."""
        return num + sep
    def make_fname_str(self, fname, sep):
        """Plain file-name prefix; replaced by the color variant."""
        return fname + sep
    def make_color_lnum_str(self, num, sep):
        """Green line number, cyan separator."""
        return self.c_lnum + num + self.c_sep + sep + self.c_off
    def make_color_fname_str(self, fname, sep):
        """Magenta file name, cyan separator."""
        return self.c_fname + fname + self.c_sep + sep + self.c_off
    def quiet_on_match(self, *args, **kargs):
        """-q: the first match settles the exit status; stop scanning."""
        raise GrepStatusDetermined
    def read(self):
        """Return an enumerate object with line number"""
        lines = self.ifile.readlines(self.bs)
        if not lines:
            return None
        count = len(lines)
        res = enumerate(lines, self.nr + 1)
        self.nr += count
        return res
    def read_tty(self):
        """Read the terminal, line by line"""
        line = self.ifile.readline()
        if not line:
            return None
        self.nr += 1
        return [(self.nr, line)]
    def make_normal_matcher(self, options):
        """Compile the pattern as bytes, honoring -w and -i.

        NOTE(review): reads self.options rather than the *options*
        argument -- the same dict in practice, but inconsistent.
        """
        # handle -w option, match word boundary
        pat = self.pattern
        if 'word_regexp' in self.options:
            pat = r'\b%s\b' % pat
        # handle -i option, ignore case
        flags = 0
        if 'ignore_case' in self.options:
            flags |= re.IGNORECASE
        pat = re.compile(pat.encode(), flags)
        return pat
    def make_matcher(self, options):
        """Wrap the compiled pattern: findall(line) -> (matches, line)."""
        pat = self.make_normal_matcher(options)
        class C:
            def findall(self, line):
                return pat.findall(line), line
        return C()
    def make_color_matcher(self, options):
        """Like make_matcher, but wraps every match (in both the match
        list and the returned line) in color escape sequences."""
        pat = self.make_normal_matcher(options)
        c_match = self.c_match
        c_off = self.c_off
        class C:
            def findall(self, line):
                matches = pat.findall(line)
                if matches:
                    matches = [c_match + x + c_off for x in matches]
                    line = re.sub(pat, self.apply_color, line)
                return matches, line
            def apply_color(self, m):
                return c_match + m.group() + c_off
        return C()
    def make_fname(self, name):
        """Make a file name for output"""
        # name == 0 is the stdin file descriptor
        if name == 0:
            name = '(standard input)'.encode()
        else:
            name = str(name).encode()
        return name
    def format_output(self, lines, lnum, options, sep=b':'):
        """Format lines for output"""
        # handle -n option, show line number
        if 'line_number' in options:
            lines = self.insert_line_number(lines, lnum, sep)
        # insert file name if necessary
        if options['with_filename']:
            lines = self.insert_file_name(lines, self.fname, sep)
        return lines
    def write(self, lines):
        """Batch write; swapped for write_tty on terminal input."""
        self.ofile.writelines(lines)
    def write_tty(self, lines):
        """Write to terminal, flush after every write"""
        self.ofile.writelines(lines)
        self.ofile.flush()
    def on_match(self, matches, line, lnum):
        """Emit a matching line (or just the matched parts for -o)."""
        self.status = True
        # handle -o option, show only the matched part
        if 'only_matching' in self.options:
            lines = (x + b'\n' for x in matches)
        else:
            lines = [line]
        lines = self.format_output(lines, lnum, self.options)
        self.write(lines)
    def on_not_match(self, *args, **kargs):
        """Default no-op; swapped with on_match under -v."""
        return None
    def run(self):
        """Scan the whole input; return True if anything matched."""
        while True:
            lines_data = self.read()
            if not lines_data:
                break
            for n, line in lines_data:
                matches, line = self.matcher.findall(line)
                if matches:
                    self.on_match(matches, line, n)
                else:
                    self.on_not_match(matches, line, n)
        return self.status
class GrepWorkerAgg(GrepWorker):
    """Count matching lines (count mode) instead of printing them."""

    def __init__(self, *args, **kargs):
        super(GrepWorkerAgg, self).__init__(*args, **kargs)
        self.match_count = 0

    def format_output(self, lines, options):
        """Format the final count line; only a file-name prefix applies."""
        if options['with_filename']:
            return self.insert_file_name(lines, self.fname)
        return lines

    def on_match(self, matches, line, lnum):
        """Record the match; nothing is printed per line in count mode."""
        self.status = True
        self.match_count += 1

    def run(self):
        """Scan the whole input, then emit a single count line."""
        status = super(GrepWorkerAgg, self).run()
        out = self.format_output([str(self.match_count).encode() + b'\n'],
                                 self.options)
        self.write(out)
        return status
class GrepWorkerFileName(GrepWorker):
    """List only the names of matching files (-l)."""

    def on_match(self, matches, line, lnum):
        """The first hit decides the whole file: bail out early."""
        raise GrepNameDetermined

    def run(self):
        """Print the file name iff any line matched."""
        try:
            super(GrepWorkerFileName, self).run()
        except GrepNameDetermined:
            self.write([self.fname + b'\n'])
            return True
        return False
class GrepWorkerContext(GrepWorker):
    """GrepWorker with -B/-A context handling.

    b_buf holds (lnum, line) pairs seen since the last emission (the
    potential 'before' context); a_counter counts how many more
    non-matching lines must still be printed as 'after' context;
    last_written_lnum (0 = nothing yet) drives the '--' separators.
    """
    def __init__(self, *args, **kargs):
        super(GrepWorkerContext, self).__init__(*args, **kargs)
        self.before = self.options.get('before', 0)  # -B n
        self.after = self.options.get('after', 0)  # -A n
        self.b_buf = []  # pending 'before' context: (lnum, line) pairs
        self.a_counter = 0  # remaining 'after' lines to print
        self.last_written_lnum = 0  # 0 means nothing written yet
    def write_separator(self, lnum):
        """Emit the group separator when there is a line-number gap
        between the last printed line and the next one to print."""
        last_lnum = self.last_written_lnum
        first_lnum = self.b_buf[0][0] if self.b_buf else lnum
        if last_lnum and first_lnum - last_lnum > 1:
            self.write([self.sep_line])
    def on_match(self, matches, line, lnum):
        # the 'before' buffer may contain more lines than needed,
        # truncate it before writing the separator in order not
        # to interfere the line number calculation.
        if self.before:
            self.b_buf = self.b_buf[-self.before:]
        else:
            self.b_buf.clear()
        self.write_separator(lnum)
        self.write_b_buffer()
        super(GrepWorkerContext, self).on_match(matches, line, lnum)
        self.last_written_lnum = lnum
        self.reset_a_counter()
    def on_not_match(self, matches, line, lnum):
        # While a_counter is live this line is 'after' context (printed
        # with a '-' separator); otherwise it becomes 'before' context.
        if self.a_counter:
            if 'only_matching' not in self.options:
                lines = self.format_output([line], lnum, self.options, b'-')
                self.write(lines)
                self.last_written_lnum = lnum
            self.a_counter -= 1
        else:
            self.b_buf.append((lnum, line))
    def reset_a_counter(self):
        """Arm the 'after'-context window following a match."""
        self.a_counter = self.after
    def write_b_buffer(self):
        """Write out the 'before' buffer"""
        if not self.b_buf:
            return
        # write only when -o option is not presented,
        if 'only_matching' not in self.options:
            for lnum, line in self.b_buf:
                lines = self.format_output([line], lnum, self.options, b'-')
                self.write(lines)
        self.last_written_lnum = self.b_buf[-1][0]
        self.b_buf.clear()
    def run(self):
        # Same loop as GrepWorker.run, but trims b_buf to the -B window
        # between reads (note: [-0:] keeps the whole list when bs == 0).
        bs = self.before
        while True:
            self.b_buf = self.b_buf[-bs:]
            lines_data = self.read()
            if not lines_data:
                break
            for n, line in lines_data:
                matches, line = self.matcher.findall(line)
                if matches:
                    self.on_match(matches, line, n)
                else:
                    self.on_not_match(matches, line, n)
        return self.status
def recursive_walk(worker, names, pattern, options):
    """Process all regular files, descend into directories. When
    the -q option is provided, the first match will trigger an
    exception named GrepStatusDetermined."""
    def processor(names, pattern, options, worker):
        statuses = []
        for name in names:
            if os.path.isfile(name):
                statuses.append(worker(name, pattern, options))
            elif os.path.isdir(name):
                try:
                    children = os.listdir(name)
                except Exception as e:
                    # Unreadable directory: report and count as a miss.
                    print(str(e), file=sys.stderr)
                    statuses.append(False)
                else:
                    # Queue the directory's entries onto the very list
                    # being iterated -- a breadth-style descent.
                    names.extend(os.path.join(name, x) for x in children)
        return statuses
    return walk(worker, names, pattern, options, processor)
def walk(worker, names, pattern, options, processor=None):
    """Each file shall be a regular file. When the -q option is
    provided, the first match will trigger an exception named
    GrepStatusDetermined."""
    if not processor:
        # Default: apply the worker to every name, collect statuses.
        def processor(names, pattern, options, worker):
            return [worker(name, pattern, options) for name in names]
    try:
        results = processor(names, pattern, options, worker)
    except GrepStatusDetermined:
        results = [True]
    # -q succeeds on any match; otherwise every file must have matched.
    if 'quiet' in options:
        return any(results)
    return all(results)
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Add the spark_time column to the BrewPiSpark model."""
    # Must run after the previous auto-generated migration of 'api'.
    dependencies = [
        ('api', '0008_auto_20150405_1435'),
    ]
    operations = [
        migrations.AddField(
            model_name='brewpispark',
            name='spark_time',
            # NOTE(review): bytes verbose_name (b'Spark Time') looks like
            # a py2-era artifact -- confirm before normalizing to str.
            field=models.BigIntegerField(default=0, verbose_name=b'Spark Time'),
            preserve_default=True,
        ),
    ]
|
import logging
from mimeprovider.documenttype import get_default_document_types
from mimeprovider.client import get_default_client
from mimeprovider.exceptions import MimeException
from mimeprovider.exceptions import MimeBadRequest
from mimeprovider.mimerenderer import MimeRenderer
from mimeprovider.validators import get_default_validator
__all__ = ["MimeProvider"]
__version__ = "0.1.5"
log = logging.getLogger(__name__)
def build_json_ref(request):
    """Return a ``json_ref(route, document=None, **kw)`` helper bound to
    *request*.

    The helper builds a JSON reference object::

        {"$ref": <path for route>, "rel": <relation name>}

    The relation defaults to the document's ``object_type`` attribute
    (falling back to its class name), or to the route name when no
    document is given; the ``rel_`` keyword overrides it.  All other
    keywords are forwarded to ``request.route_path``.
    """
    def json_ref(route, document=None, **kw):
        if document:
            rel_default = getattr(document, "object_type",
                                  document.__class__.__name__)
        else:
            rel_default = route
        # Bug fix: pop rel_ BEFORE generating the path -- it is reference
        # metadata, not a route parameter, and previously leaked into the
        # route_path(**kw) call.
        rel = kw.pop("rel_", rel_default)
        ref = dict()
        ref["$ref"] = request.route_path(route, **kw)
        ref["rel"] = rel
        return ref
    return json_ref
class MimeProvider(object):
    """Registry that wires document classes and mime-type handlers into a
    Pyramid-style configurator.

    Keyword options: ``renderer_name`` ("mime"), ``attribute_name``
    ("mime_body"), ``error_handler``, ``set_default_renderer``,
    ``validator``, ``types``, ``client``, ``error_document_type``.
    """

    def __init__(self, documents=(), **kw):
        # Bug fix: the default was a shared mutable list ([]); an
        # immutable tuple avoids cross-instance aliasing.  Behavior is
        # otherwise unchanged.
        self.renderer_name = kw.get("renderer_name", "mime")
        self.attribute_name = kw.get("attribute_name", "mime_body")
        self.error_handler = kw.get("error_handler", None)
        self.set_default_renderer = kw.get("set_default_renderer", False)
        self.validator = kw.get("validator")
        if self.validator is None:
            self.validator = get_default_validator()
        types = kw.get("types")
        if types is None:
            types = get_default_document_types()
        if not types:
            raise ValueError("No document types specified")
        self.client = kw.get("client")
        if self.client is None:
            self.client = get_default_client()
        # One instance per document-type class; order matters: the first
        # instance is the fallback error document type.
        self.type_instances = [t() for t in types]
        self.mimeobjects = dict()
        self.mimetypes = dict(self._generate_base_mimetypes())
        self.error_document_type = kw.get(
            "error_document_type",
            self.type_instances[0])
        self.register(*documents)

    def _validate(self, document):
        """Raise ValueError unless *document* declares object_type."""
        if not hasattr(document, "object_type"):
            raise ValueError(
                ("Object does not have required 'object_type' "
                 "attribute {0!r}").format(document))

    def _generate_base_mimetypes(self):
        """
        Generate the base mimetypes as described by non customized document
        types.
        """
        for t in self.type_instances:
            if t.custom_mime:
                continue
            yield t.mime, (t, None, None)

    def _generate_document_mimetypes(self, documents):
        """Yield ((mimetype, handler), (document, mapping)) pairs for
        custom-mime document types, building a schema validator when the
        document declares one."""
        for t in self.type_instances:
            if not t.custom_mime:
                continue
            for o in documents:
                mimetype = t.mime.format(o=o)
                validator = None
                if hasattr(o, "schema"):
                    validator = self.validator(o.schema)
                m_value = (mimetype, (t, o, validator))
                o_value = (o, (t, mimetype, validator))
                yield m_value, o_value

    def register(self, *documents):
        """Register document classes.

        Raises ValueError when a document lacks object_type or when a
        generated mime type collides with an existing handler.
        """
        documents = list(documents)
        for document in documents:
            self._validate(document)
        generator = self._generate_document_mimetypes(documents)
        for (m, m_value), (o, o_value) in generator:
            self.mimeobjects.setdefault(o, []).append(o_value)
            if m not in self.mimetypes:
                self.mimetypes[m] = m_value
                continue
            # Conflict: keep distinct names for the two handler classes
            # (the original re-used 'validator' confusingly here).
            _, existing_cls, _ = self.mimetypes[m]
            _, new_cls, _ = m_value
            raise ValueError(
                "Conflicting handler for {0}, {1} and {2}".format(
                    m, existing_cls, new_cls))

    def get_client(self, *args, **kw):
        """Build a client bound to this provider's registries."""
        return self.client(self.mimetypes, self.mimeobjects, *args, **kw)

    def get_mime_body(self, request):
        """Deserialize the request body according to its Content-Type.

        Returns None for an empty body or content type; raises
        MimeBadRequest for an unknown or non-deserializable type.
        """
        if not request.body or not request.content_type:
            return None
        result = self.mimetypes.get(request.content_type)
        if result is None:
            raise MimeBadRequest(
                "Unsupported Content-Type: " + request.content_type)
        document_type, cls, validator = result
        # the specific document does not support deserialization.
        if not hasattr(cls, "from_data"):
            raise MimeBadRequest(
                "Unsupported Content-Type: " +
                request.content_type)
        return document_type.parse(validator, cls, request.body)

    @property
    def renderer(self):
        """Renderer factory; requires error_handler to be configured."""
        if self.error_handler is None:
            raise ValueError("No 'error_handler' available")

        def setup_renderer(helper):
            return MimeRenderer(self.mimetypes, self.error_document_type,
                                self.error_handler, validator=self.validator)

        return setup_renderer

    def add_config(self, config):
        """Attach renderers, request properties and the error view to a
        Pyramid configurator."""
        config.add_renderer(self.renderer_name, self.renderer)
        if self.set_default_renderer:
            config.add_renderer(None, self.renderer)
        config.set_request_property(self.get_mime_body, self.attribute_name,
                                    reify=True)
        config.set_request_property(build_json_ref, "json_ref", reify=True)
        config.add_view(self.error_handler, context=MimeException,
                        renderer=self.renderer_name)
|
import zmq
from crpropa import Module
class SendCandidateProperties(Module):
    """CRPropa module that ships per-candidate data to a remote server.

    For every processed candidate, ``extract_func(candidate)`` is
    evaluated and the result sent over a ZeroMQ REQ socket to the
    server listening on ``ip_port`` (``host:port``).
    """

    def __init__(self, ip_port, extract_func):
        Module.__init__(self)
        self.socket = None
        self.ip_port = "tcp://" + ip_port
        self.extract_func = extract_func

    def beginRun(self):
        # Create the socket lazily at run start, not at construction.
        context = zmq.Context()
        self.socket = context.socket(zmq.REQ)
        self.socket.connect(self.ip_port)

    def process(self, c):
        # REQ/REP lockstep: the reply must be read before the next send.
        self.socket.send_pyobj(self.extract_func(c))
        self.socket.recv_pyobj()

    def endRun(self):
        del self.socket
class RecvCandidateProperties:
    """Server-side counterpart of SendCandidateProperties.

    Binds a ZeroMQ REP socket on ``ip_port``; call :meth:`recv` in a
    loop to obtain each incoming payload.
    """

    def __init__(self, ip_port):
        context = zmq.Context()
        self.socket = context.socket(zmq.REP)
        self.socket.bind("tcp://" + ip_port)

    def recv(self):
        """Receive one payload, acknowledge by echoing it, return it."""
        payload = self.socket.recv_pyobj()
        self.socket.send_pyobj(payload)
        return payload
|
"""
A basic example of loading YAML
Make sure you use the "safe_load" method rather than "load"; plain
"load" emits a warning and is unsafe on untrusted input.
References:
- https://stackoverflow.com/questions/1773805/how-can-i-parse-a-yaml-file-in-python
"""
import yaml

with open("data_samples/basic.yaml", 'r') as fh:
    try:
        # safe_load builds only plain Python objects (no arbitrary tags)
        data = yaml.safe_load(fh)
        assert "concepts" in data
    except yaml.YAMLError as exc:
        print(exc)
|
import threading
from systemd import journal
from threading import Thread
import smtplib
import email.utils
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
class Mailer(threading.Thread):
    """
    Mailer
    :desc: Class that sends an email
    Extends Thread

    NOTE(review): run() takes extra arguments, so it is presumably
    invoked directly rather than via Thread.start() (which calls run()
    with no arguments) -- confirm at the call site.
    """
    def __init__(self):
        """
        __init__
        :desc: Constructor function that calls parent
        """
        Thread.__init__(self)
    def run(self, stri, dictio):
        """
        run
        :desc : Function that does the heavy lifting
        :params : The string to be mailed and a dict
        containing config options necessary for the mail to be delivered.

        Three delivery paths are attempted depending on the config:
        plain SMTP, SMTPS (implicit TLS), and STARTTLS, each with an
        optional authenticated variant.  Errors are logged to the
        systemd journal; nothing is raised to the caller.
        """
        dictionary = dictio
        msg = MIMEMultipart("alternative")
        #get it from the queue?
        stripped = stri.strip()
        part1 = MIMEText(stripped, "plain")
        msg['Subject'] = dictionary['email_subject']
        #http://pymotw.com/2/smtplib/
        msg['To'] = email.utils.formataddr(('Recipient', dictionary['email_to']))
        msg['From'] = email.utils.formataddr((dictionary['email_from'], dictionary['email_from']))
        msg.attach(part1)
        # --- plain SMTP -------------------------------------------------
        if dictionary['smtp'] == True:
            # no auth
            if dictionary['auth'] == False:
                s = smtplib.SMTP()
                s.connect(host=str(dictionary['smtp_host']), port=dictionary['smtp_port'])
                try:
                    send = s.sendmail(str(dictionary['email_from']), [str(dictionary['email_to'])], msg.as_string())
                except Exception as ex:
                    template = "An exception of type {0} occured. Arguments:\n{1!r}"
                    message = template.format(type(ex).__name__, ex.args)
                    journal.send("systemd-denotify: "+message)
                finally:
                    s.quit()
                    del s
            # auth
            elif dictionary['auth'] == True:
                s = smtplib.SMTP()
                s.connect(host=str(dictionary['smtp_host']), port=dictionary['smtp_port'])
                # NOTE(review): login() happens outside the try block, so
                # an auth failure escapes the journal logging below.
                s.login(str(dictionary['auth_user']), str(dictionary['auth_password']))
                try:
                    # NOTE(review): this branch strips the rendered
                    # message (as_string().strip()) unlike the no-auth
                    # branch -- presumably intentional; confirm.
                    send = s.sendmail(str(dictionary['email_from']), [str(dictionary['email_to'])], msg.as_string().strip())
                except Exception as ex:
                    template = "An exception of type {0} occured. Arguments:\n{1!r}"
                    message = template.format(type(ex).__name__, ex.args)
                    journal.send("systemd-denotify: "+message)
                finally:
                    s.quit()
                    del s
            else:
                pass
        #smtps
        if dictionary['smtps'] == True:
            # no auth ?
            if dictionary['auth'] == False:
                try:
                    if len(dictionary['smtps_cert']) > 0 and len(dictionary['smtps_key']) > 0:
                        s = smtplib.SMTP_SSL(host=str(dictionary['smtps_host']), port=dictionary['smtps_port'], keyfile=dictionary['smtps_key'], certfile=dictionary['smtps_cert'])
                        s.ehlo_or_helo_if_needed()
                        send = s.sendmail(str(dictionary['email_from']), [str(dictionary['email_to'])], msg.as_string())
                    else:
                        s = smtplib.SMTP_SSL(host=str(dictionary['smtps_host']), port=dictionary['smtps_port'])
                        s.ehlo_or_helo_if_needed()
                        send = s.sendmail(str(dictionary['email_from']), [str(dictionary['email_to'])], msg.as_string())
                except Exception as ex:
                    template = "An exception of type {0} occured. Arguments:\n{1!r}"
                    message = template.format(type(ex).__name__, ex.args)
                    journal.send("systemd-denotify: "+message)
                finally:
                    # NOTE(review): if SMTP_SSL() itself raised, 's' is
                    # unbound here and quit() raises NameError -- verify.
                    s.quit()
                    del s
            # auth
            elif dictionary['auth'] == True:
                try:
                    #check whether it is a real file and pem encoded
                    if len(dictionary['smtps_cert']) > 0 and len(dictionary['smtps_key']) > 0:
                        s = smtplib.SMTP_SSL(host=str(dictionary['smtps_host']), port=dictionary['smtps_port'], keyfile=dictionary['smtps_key'], certfile=dictionary['smtps_cert'])
                        s.ehlo_or_helo_if_needed()
                        s.login(dictionary['auth_user'], dictionary['auth_password'])
                        send = s.sendmail(str(dictionary['email_from']), [str(dictionary['email_to'])], msg.as_string())
                    else:
                        s = smtplib.SMTP_SSL(host=str(dictionary['smtps_host']), port=dictionary['smtps_port'])
                        s.ehlo_or_helo_if_needed()
                        s.login(dictionary['auth_user'], dictionary['auth_password'])
                        send = s.sendmail(str(dictionary['email_from']), [str(dictionary['email_to'])], msg.as_string())
                except Exception as ex:
                    template = "An exception of type {0} occured. Arguments:\n{1!r}"
                    message = template.format(type(ex).__name__, ex.args)
                    journal.send("systemd-denotify: "+message)
                finally:
                    s.quit()
                    del s
            else:
                pass
        #starttls
        if dictionary['starttls'] == True:
            # no auth
            if dictionary['auth'] == False:
                try:
                    s = smtplib.SMTP()
                    s.connect(host=str(dictionary['starttls_host']), port=dictionary['starttls_port'])
                    s.ehlo()
                    #http://pymotw.com/2/smtplib/
                    # NOTE(review): if the server lacks STARTTLS nothing
                    # is sent and no error is reported.
                    if s.has_extn("STARTTLS"):
                        #check whether it is a real file and pem encoded
                        if len(dictionary['starttls_cert']) > 0 and len(dictionary['starttls_key']) > 0:
                            s.starttls(keyfile=dictionary['starttls_key'], certfile=dictionary['starttls_cert'])
                            s.ehlo()
                            send = s.sendmail(str(dictionary['email_from']), [str(dictionary['email_to'])], msg.as_string())
                        else:
                            s.starttls()
                            s.ehlo()
                            send = s.sendmail(str(dictionary['email_from']), [str(dictionary['email_to'])], msg.as_string())
                except Exception as ex:
                    template = "An exception of type {0} occured. Arguments:\n{1!r}"
                    message = template.format(type(ex).__name__, ex.args)
                    journal.send("systemd-denotify: "+message)
                finally:
                    s.quit()
                    del s
            # auth
            elif dictionary['auth'] == True:
                try:
                    s = smtplib.SMTP()
                    s.connect(host=str(dictionary['starttls_host']), port=dictionary['starttls_port'])
                    #http://pymotw.com/2/smtplib/
                    s.ehlo()
                    if s.has_extn("STARTTLS"):
                        #check whether it is a real file and pem encoded
                        if len(dictionary['starttls_cert']) >0 and len(dictionary['starttls_key'])>0:
                            s.starttls(keyfile=dictionary['starttls_key'], certfile=dictionary['starttls_cert'])
                            s.ehlo()
                            s.login(str(dictionary['auth_user']).strip(), str(dictionary['auth_password']))
                            send = s.sendmail(str(dictionary['email_from']), [str(dictionary['email_to'])], msg.as_string())
                        else:
                            s.starttls()
                            s.ehlo()
                            s.login(str(dictionary['auth_user']).strip(), str(dictionary['auth_password']))
                            send = s.sendmail(str(dictionary['email_from']), [str(dictionary['email_to'])], msg.as_string())
                except Exception as ex:
                    template = "An exception of type {0} occured. Arguments:\n{1!r}"
                    message = template.format(type(ex).__name__, ex.args)
                    journal.send("systemd-denotify: "+message)
                finally:
                    s.quit()
                    del s
            else:
                pass
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.