| repo_name (string, 5–104 chars) | path (string, 4–248 chars) | content (string, 102–99.9k chars) |
|---|---|---|
Sorsly/subtle
|
google-cloud-sdk/lib/surface/runtime_config/configs/waiters/delete.py
|
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The configs waiters delete command."""
from googlecloudsdk.api_lib.runtime_config import util
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.runtime_config import flags
from googlecloudsdk.core import log
class Delete(base.DeleteCommand):
  """Delete waiter resources.

  This command deletes the waiter resource with the specified name.
  """

  # Help text consumed by calliope; '{description}' and '{command}' are
  # substituted by the framework.
  detailed_help = {
      'DESCRIPTION': '{description}',
      'EXAMPLES': """\
          To delete a waiter named "my-waiter" within a configuration named
          "my-config", run:

            $ {command} my-waiter --config-name my-config
          """,
  }

  @staticmethod
  def Args(parser):
    """Args is called by calliope to gather arguments for this command.

    Args:
      parser: An argparse parser that you can use to add arguments that go
          on the command line after this command. Positional arguments are
          allowed.
    """
    flags.AddConfigFlag(parser)
    parser.add_argument('name', help='The waiter name.')

  def Collection(self):
    """Returns the default collection path string.

    Returns:
      The default collection path string.
    """
    return 'runtimeconfig.waiters'

  def Run(self, args):
    """Run 'runtime-configs waiters delete'.

    Args:
      args: argparse.Namespace, The arguments that this command was invoked
          with.

    Raises:
      HttpException: An http error response was received while executing api
          request.
    """
    waiter_client = util.WaiterClient()
    messages = util.Messages()

    # Resolve the waiter name (combined with --config-name from args) into a
    # full resource path before issuing the delete.
    waiter_resource = util.ParseWaiterName(args.name, args)

    waiter_client.Delete(
        messages.RuntimeconfigProjectsConfigsWaitersDeleteRequest(
            name=waiter_resource.RelativeName(),
        )
    )

    # Only reached if Delete did not raise; report the deletion to the user.
    log.DeletedResource(waiter_resource)
|
bradmwalker/wanmap
|
setup.py
|
import os

from setuptools import setup, find_packages

# Assemble the long description from the README and changelog that live
# next to this setup.py.
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.md')) as f:
    README = f.read()
with open(os.path.join(here, 'CHANGES.md')) as f:
    CHANGES = f.read()

# Runtime dependencies; also reused as test requirements below.
requires = [
    'arrow',
    'celery',
    'psycopg2>=2.7.0',  # register_ipaddress
    'pyramid_jinja2',
    'pyramid_tm',
    'waitress',
    'zope.sqlalchemy',
]

setup(name='wanmap',
      version='0.0',
      description='A distributed nmap web application',
      long_description=README + '\n\n' + CHANGES,
      classifiers=[
          "Programming Language :: Python",
          "Framework :: Pyramid",
          "Topic :: Internet :: WWW/HTTP",
          "Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
      ],
      author='Brad Walker',
      author_email='brad@bradmwalker.com',
      url='https://wanmap.org',
      keywords='web pyramid pylons',
      packages=find_packages(),
      include_package_data=True,
      zip_safe=False,
      install_requires=requires,
      tests_require=requires,
      test_suite="wanmap",
      # Paste app factory for the WSGI app plus a console script that
      # initializes the database.
      entry_points="""\
      [paste.app_factory]
      main = wanmap:main
      [console_scripts]
      initialize_wanmap_db = wanmap.scripts.initializedb:main
      """,
      )
|
quchunguang/test
|
testpy3/testinternet.py
|
"""
Created on 2013-1-19
@author: Administrator
"""
import urllib.request
import smtplib
for line in urllib.request.urlopen('http://www.baidu.com'):
line = line.decode('gb2312')
print(line)
server = smtplib.SMTP('localhost')
server.sendmail('quchunguang@example.org', 'quchunguang@gmail.com',
"""To: quchunguang@example.org
From: quchunguang@gmail.com
Beware the Ides of March.
""")
server.quit()
|
herove/dotfiles
|
sublime/Packages/SublimeCodeIntel/libs/codeintel2/util.py
|
#!python
# encoding: utf-8
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
#
# The contents of this file are subject to the Mozilla Public License
# Version 1.1 (the "License"); you may not use this file except in
# compliance with the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS"
# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
# License for the specific language governing rights and limitations
# under the License.
#
# The Original Code is Komodo code.
#
# The Initial Developer of the Original Code is ActiveState Software Inc.
# Portions created by ActiveState Software Inc are Copyright (C) 2000-2007
# ActiveState Software Inc. All Rights Reserved.
#
# Contributor(s):
# ActiveState Software Inc
#
# Alternatively, the contents of this file may be used under the terms of
# either the GNU General Public License Version 2 or later (the "GPL"), or
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
# in which case the provisions of the GPL or the LGPL are applicable instead
# of those above. If you wish to allow use of your version of this file only
# under the terms of either the GPL or the LGPL, and not to allow others to
# use your version of this file under the terms of the MPL, indicate your
# decision by deleting the provisions above and replace them with the notice
# and other provisions required by the GPL or the LGPL. If you do not delete
# the provisions above, a recipient may use your version of this file under
# the terms of any one of the MPL, the GPL or the LGPL.
#
# ***** END LICENSE BLOCK *****
"""Code Intelligence: utility functions"""
import bisect
import os
from os.path import basename
import sys
import re
import stat
import textwrap
import logging
import types
from pprint import pprint, pformat
import time
import codecs
# Global dict for holding specific hotshot profilers
hotshotProfilers = {}
#---- general stuff

def isident(char):
    """True iff *char* is an ASCII identifier character: a letter or '_'.

    Deliberately ASCII-only (uses range comparisons, not str.isalpha).
    """
    if char == "_":
        return True
    return ("a" <= char <= "z") or ("A" <= char <= "Z")
def isdigit(char):
    """True iff *char* is an ASCII decimal digit ('0'..'9')."""
    # De Morgan form of the original range check.
    return not (char < "0" or char > "9")
# A "safe" language name for the given language where safe generally
# means safe for a file path.
_safe_lang_from_lang_cache = {
"C++": "cpp",
}
def safe_lang_from_lang(lang):
global _safe_lang_from_lang_cache
try:
return _safe_lang_from_lang_cache[lang]
except KeyError:
safe_lang = lang.lower().replace(' ', '_')
_safe_lang_from_lang_cache[lang] = safe_lang
return safe_lang
# @deprecated: Manager.buf_from_path now uses textinfo to guess lang.
def guess_lang_from_path(path):
    """Guess the language name from a file path by its (compound) extension.

    Tries progressively shorter extensions: for "t.html.erb" it first
    tries ".html.erb" (RHTML), then ".erb".

    Raises CodeIntelError if no known extension matches.
    """
    lang_from_ext = {
        ".py": "Python",
        ".pl": "Perl",
        ".pm": "Perl",
        ".tcl": "Tcl",
        ".php": "PHP",
        ".inc": "PHP",
        ".rb": "Ruby",
        ".rhtml": "RHTML",
        ".html.erb": "RHTML",
        ".js": "JavaScript",
        ".java": "Java",
        ".css": "CSS",
        ".xul": "XUL",
        ".xbl": "XBL",
        ".html": "HTML",
        ".xml": "XML",
        ".tpl": "Smarty",
        ".django.html": "Django",
        ".mason.html": "Mason",
        ".ttkt.html": "TemplateToolkit",
        ".cxx": "C++",
    }
    base = basename(path)
    search_from = 0
    while True:
        dot = base.find('.', search_from)
        if dot == -1:
            break
        ext = base[dot:]
        if ext in lang_from_ext:
            return lang_from_ext[ext]
        search_from = dot + 1

    from codeintel2.common import CodeIntelError
    raise CodeIntelError("couldn't guess lang for `%s'" % path)
def gen_dirs_under_dirs(dirs, max_depth, interesting_file_patterns=None,
                        skip_scc_control_dirs=True):
    """Generate all dirs under the given dirs (including the given dirs
    themselves).

        "max_depth" is an integer maximum number of sub-directories that
            this method with recurse.
        "file_patterns", if given, is a sequence of glob patterns for
            "interesting" files. Directories with no interesting files are
            not included (though sub-directories of these may be).
        "skip_scc_control_dirs" is a boolean (default True) indicating if
            svn and cvs control dirs should be skipped.
    """
    from os.path import normpath, abspath, expanduser
    from fnmatch import fnmatch
    dirs_to_skip = (skip_scc_control_dirs
                    and ["CVS", ".svn", ".hg", ".git", ".bzr"] or [])

    # We must keep track of the directories we have walked, as the list of dirs
    # can overlap - bug 90289.
    walked_these_dirs = {}
    for dir in dirs:
        norm_dir = normpath(abspath(expanduser(dir)))
        LEN_DIR = len(norm_dir)
        for dirpath, dirnames, filenames in walk2(norm_dir):
            if dirpath in walked_these_dirs:
                dirnames[:] = []  # Already walked - no need to do it again.
                continue
            # Depth is measured by os.sep count in the path below norm_dir;
            # emptying dirnames stops walk2 from descending further.
            if dirpath[LEN_DIR:].count(os.sep) >= max_depth:
                dirnames[:] = []  # hit max_depth
            else:
                walked_these_dirs[dirpath] = True
            # Prune SCC control dirs in place so walk2 never enters them.
            for dir_to_skip in dirs_to_skip:
                if dir_to_skip in dirnames:
                    dirnames.remove(dir_to_skip)
            if interesting_file_patterns:
                # for-else: yield dirpath only if some filename matches one
                # of the "interesting" glob patterns.
                for pat, filename in (
                    (p, f) for p in interesting_file_patterns
                        for f in filenames):
                    if fnmatch(filename, pat):
                        break
                else:
                    # No interesting files in this dir.
                    continue
            yield dirpath
#---- standard module/class/function doc parsing

LINE_LIMIT = 5    # limit full number of lines this number
LINE_WIDTH = 60   # wrap doc summaries to this width

# Examples matches to this pattern:
#    foo(args)
#    foo(args) -> retval
#    foo(args) -- description
#    retval = foo(args)
#    retval = foo(args) -- description
_gPySigLinePat = re.compile(
    r"^((?P<retval>[^=]+?)\s*=|class)?\s*(?P<head>[\w\.]+\s?\(.*?\))\s*(?P<sep>[:<>=-]*)\s*(?P<tail>.*)$")
_gSentenceSepPat = re.compile(r"(?<=\.)\s+", re.M)  # split on sentence bndry


def parseDocSummary(doclines, limit=LINE_LIMIT, width=LINE_WIDTH):
    """Parse out a short summary from the given doclines.

        "doclines" is a list of lines (without trailing newlines) to parse.
        "limit" is the number of lines to which to limit the summary.

    The "short summary" is the first sentence limited by (1) the "limit"
    number of lines and (2) one paragraph. If the first *two* sentences fit
    on the first line, then use both. Returns a list of summary lines.
    """
    # Find the first non-blank line.
    start = 0
    while start < len(doclines) and not doclines[start].strip():
        start += 1

    summary = []
    for i in range(start, len(doclines)):
        if len(summary) >= limit:
            break
        stripped = doclines[i].strip()
        if not stripped:
            break  # a blank line ends the paragraph (and the summary)
        sentences = _gSentenceSepPat.split(stripped)
        if sentences and not sentences[-1].endswith('.'):
            del sentences[-1]  # last bit might not be a complete sentence
        if not sentences:
            # No complete sentence yet: accumulate (with a space marker)
            # and keep scanning the next line.
            summary.append(stripped + ' ')
            continue
        if i == start and len(sentences) > 1:
            # First two sentences fit on the first line: use both.
            summary.append(' '.join(s.strip() for s in sentences[:2]))
        else:
            summary.append(sentences[0].strip())
        break

    if summary:
        if summary[-1][-1] == ' ':
            # If terminated at non-sentence boundary then have extraneous
            # trailing space.
            summary[-1] = summary[-1][:-1]
        summary = textwrap.wrap(''.join(summary), width)
    return summary
def parsePyFuncDoc(doc, fallbackCallSig=None, scope="?", funcname="?"):
    """Parse the given Python function/method doc-string into call-signature
    and description bits.

        "doc" is the function doc string.
        "fallbackCallSig" (optional) is a list of call signature lines to
            fallback to if one cannot be determined from the doc string.
        "scope" (optional) is the module/class parent scope name. This
            is just used for better error/log reporting.
        "funcname" (optional) is the function name. This is just used for
            better error/log reporting.

    Examples of doc strings with call-signature info:
        close(): explicitly release resources held.
        x.__repr__() <==> repr(x)
        read([s]) -- Read s characters, or the rest of the string
        recv(buffersize[, flags]) -> data
        replace (str, old, new[, maxsplit]) -> string
        class StringIO([buffer])

    Returns a 2-tuple: (<call-signature-lines>, <description-lines>)
    """
    if doc is None or not doc.strip():
        return ([], [])

    limit = LINE_LIMIT
    # NOTE(review): `unicode` is the Python-2 builtin; this function is
    # Python-2-only as written.
    if not isinstance(doc, unicode):
        # try to convert from utf8 to unicode; if we fail, too bad.
        try:
            doc = codecs.utf_8_decode(doc)[0]
        except UnicodeDecodeError:
            pass
    doclines = doc.splitlines(0)
    index = 0
    siglines = []
    desclines = []

    # Skip leading blank lines.
    while index < len(doclines):
        if doclines[index].strip():
            break
        index += 1

    # Parse out the call signature block, if it looks like there is one.
    if index >= len(doclines):
        match = None
    else:
        first = doclines[index].strip()
        match = _gPySigLinePat.match(first)
    if match:
        # The 'doc' looks like it starts with a call signature block.
        # NOTE(review): 'i' below is relative to doclines[index:], so
        # "index = i" assumes index was 0 when the loop started -- verify.
        for i, line in enumerate(doclines[index:]):
            if len(siglines) >= limit:
                index = i
                break
            stripped = line.strip()
            if not stripped:
                index = i
                break
            match = _gPySigLinePat.match(stripped)
            if not match:
                index = i
                break
            # Now parse off what may be description content on the same line.
            #   ":", "-" or "--" separator: tail is description
            #   "-->" or "->" separator: tail if part of call sig
            #   "<==>" separator: tail if part of call sig
            #   other separtor: leave as part of call sig for now
            descSeps = ("-", "--", ":")
            groupd = match.groupdict()
            retval, head, sep, tail = (
                groupd.get("retval"), groupd.get("head"),
                groupd.get("sep"), groupd.get("tail"))
            if retval:
                # "retval = foo(args)" form: normalize to "foo(args) -> retval".
                siglines.append(head + " -> " + retval)
                if tail and sep in descSeps:
                    desclines.append(tail)
            elif tail and sep in descSeps:
                siglines.append(head)
                desclines.append(tail)
            else:
                siglines.append(stripped)
        else:
            # Loop exhausted without break: the whole doc was signature lines.
            index = len(doclines)
    if not siglines and fallbackCallSig:
        siglines = fallbackCallSig

    # Parse out the description block.
    if desclines:
        # Use what we have already. Just need to wrap it.
        desclines = textwrap.wrap(' '.join(desclines), LINE_WIDTH)
    else:
        doclines = doclines[index:]
        # strip leading empty lines
        while len(doclines) > 0 and not doclines[0].rstrip():
            del doclines[0]
        try:
            skip_first_line = (doclines[0][0] not in (" \t"))
        except IndexError:
            skip_first_line = False  # no lines, or first line is empty
        desclines = dedent("\n".join(
            doclines), skip_first_line=skip_first_line)
        desclines = desclines.splitlines(0)

    ## debug logging
    # f = open("parsePyFuncDoc.log", "a")
    # if 0:
    #     f.write("\n---- %s:\n" % funcname)
    #     f.write(pformat(siglines)+"\n")
    #     f.write(pformat(desclines)+"\n")
    # else:
    #     f.write("\n")
    #     if siglines:
    #         f.write("\n".join(siglines)+"\n")
    #     else:
    #         f.write("<no signature for '%s.%s'>\n" % (scope, funcname))
    #     for descline in desclines:
    #         f.write("\t%s\n" % descline)
    # f.close()

    return (siglines, desclines)
#---- debugging utilities

def unmark_text(markedup_text):
    u"""Parse text with potential markup as follows and return
    (<text>, <data-dict>).

        "<|>" indicates the current position (pos), defaults to the end
            of the text.
        "<+>" indicates the trigger position (trg_pos), if present.
        "<$>" indicates the start position (start_pos) for some kind of
            of processing, if present.
        "<N>" is a numbered marker. N can be any of 0-99. These positions
            are returned as the associate number key in <data-dict>.

    Note that the positions are in UTF-8 byte counts, not character counts.
    This matches the behaviour of Scintilla positions.

    E.g.:
        >>> unmark_text('foo.<|>')
        ('foo.', {'pos': 4})
        >>> unmark_text('foo.<|><+>')
        ('foo.', {'trg_pos': 4, 'pos': 4})
        >>> unmark_text('foo.<+>ba<|>')
        ('foo.ba', {'trg_pos': 4, 'pos': 6})
        >>> unmark_text('fo<|>o.<+>ba')
        ('foo.ba', {'trg_pos': 4, 'pos': 2})
        >>> unmark_text('os.path.join<$>(<|>')
        ('os.path.join(', {'pos': 13, 'start_pos': 12})
        >>> unmark_text('abc<3>defghi<2>jk<4>lm<1>nopqrstuvwxyz')
        ('abcdefghijklmnopqrstuvwxyz', {1: 13, 2: 9, 3: 3, 4: 11, 'pos': 26})

    See the matching markup_text() below.
    """
    # Python 2/3 compatible "unicode text" type (on Python 3 every str is
    # unicode).
    try:
        text_type = unicode
    except NameError:
        text_type = str

    splitter = re.compile(r"(<(?:[\|\+\$\[\]<]|\d+)>)")
    # Bug fix: this previously read `markup_text`, which is not a name in
    # scope (the parameter is `markedup_text`) and raised NameError.
    text = u"" if isinstance(markedup_text, text_type) else ""
    data = {}
    posNameFromSymbol = {
        "<|>": "pos",
        "<+>": "trg_pos",
        "<$>": "start_pos",
        "<[>": "start_selection",
        "<]>": "end_selection",
    }

    def byte_length(text):
        # Positions are reported in UTF-8 bytes, not characters.
        if isinstance(text, text_type):
            return len(text.encode("utf-8"))
        return len(text)

    bracketed_digits_re = re.compile(r'<\d+>$')
    for token in splitter.split(markedup_text):
        if token in posNameFromSymbol:
            data[posNameFromSymbol[token]] = byte_length(text)
        elif token == "<<>":  # escape sequence for a literal "<"
            text += "<"
        elif bracketed_digits_re.match(token):
            data[int(token[1:-1])] = byte_length(text)
        else:
            text += token
    if "pos" not in data:
        # Default "pos" to the end of the text.
        data["pos"] = byte_length(text)
    # sys.stderr.write(">> text:%r, data:%s\n" % (text, data))
    return text, data
def markup_text(text, pos=None, trg_pos=None, start_pos=None):
    """Markup text with position markers.

    See the matching unmark_text() above. Positions are UTF-8 byte
    offsets; markers are inserted in ascending position order.
    """
    markers = [(p, sym) for p, sym in
               ((pos, '<|>'), (trg_pos, '<+>'), (start_pos, '<$>'))
               if p is not None]
    markers.sort()
    if not isinstance(text, bytes):
        text = text.encode("utf-8")
    pieces = []
    prev = 0
    for position, marker in markers:
        pieces.append(text[prev:position].decode('utf-8', 'ignore'))
        pieces.append(marker)
        prev = position
    pieces.append(text[prev:].decode('utf-8', 'ignore'))
    return "".join(pieces)
def lines_from_pos(unmarked_text, positions):
    """Get 1-based line numbers from positions

    @param unmarked_text {str} The text to examine
    @param positions {dict or list of int} Byte positions to look up
    @returns {dict or list of int} Matching line numbers (1-based)

    Given some text and either a list of positions, or a dict containing
    positions as values, return a matching data structure with positions
    replaced with the line number of the lines the positions are on. Positions
    after the last line are assumed to be on a hypothetical line.

    E.g.:
        Assuming the following text with \n line endings, where each line is
        exactly 20 characters long:
        >>> text = '''
        ... line one
        ... line two
        ... line three
        ... '''.lstrip()
        >>> lines_from_pos(text, [5, 15, 25, 55, 999])
        [1, 1, 2, 3, 4]
        >>> lines_from_pos(text, {"hello": 10, "moo": 20, "not": "an int"})
        {'moo': 1, 'hello': 1}
    """
    # NOTE(review): `unicode` and dict.iterkeys() are Python-2-only.
    # offsets[i] is the UTF-8 byte offset where line i+1 starts.
    lines = unicode(unmarked_text).splitlines(True)
    offsets = [0]
    for line in lines:
        offsets.append(offsets[-1] + len(line.encode("utf-8")))
    try:
        # assume a dict
        keys = positions.iterkeys()
        values = {}
    except AttributeError:
        # assume a list/tuple
        keys = range(len(positions))
        values = []
    for key in keys:
        try:
            # "- 0" forces a TypeError for non-numeric values, which are
            # silently dropped from the result (see docstring example).
            position = positions[key] - 0
        except TypeError:
            continue  # not a number
        line_no = bisect.bisect_left(offsets, position)
        try:
            values[key] = line_no
        except IndexError:
            # list result: append when assigning at the next free index.
            if key == len(values):
                values.append(line_no)
            else:
                raise
    return values
# Recipe: banner (1.0.1) in C:\trentm\tm\recipes\cookbook
def banner(text, ch='=', length=78):
    """Return a banner line centering the given text.

        "text" is the text to show in the banner. None can be given to have
            no text.
        "ch" (optional, default '=') is the banner line character (can
            also be a short string to repeat).
        "length" (optional, default 78) is the length of banner to make.

    Examples:
        >>> banner("Peggy Sue")
        '================================= Peggy Sue =================================='
        >>> banner("Peggy Sue", ch='-', length=50)
        '------------------- Peggy Sue --------------------'
        >>> banner("Pretty pretty pretty pretty Peggy Sue", length=40)
        'Pretty pretty pretty pretty Peggy Sue'
    """
    if text is None:
        return ch * length
    elif len(text) + 2 + len(ch)*2 > length:
        # Not enough space for even one line char (plus space) around text.
        return text
    else:
        remain = length - (len(text) + 2)
        # Floor division: identical to '/' for ints on Python 2, and keeps
        # these counts integral on Python 3 (plain '/' would make them
        # floats and break the repetitions below).
        prefix_len = remain // 2
        suffix_len = remain - prefix_len
        if len(ch) == 1:
            prefix = ch * prefix_len
            suffix = ch * suffix_len
        else:
            prefix = ch * (prefix_len // len(ch)) + ch[:prefix_len % len(ch)]
            suffix = ch * (suffix_len // len(ch)) + ch[:suffix_len % len(ch)]
        return prefix + ' ' + text + ' ' + suffix
# Recipe: dedent (0.1.2) in C:\trentm\tm\recipes\cookbook
def _dedentlines(lines, tabsize=8, skip_first_line=False):
"""_dedentlines(lines, tabsize=8, skip_first_line=False) -> dedented lines
"lines" is a list of lines to dedent.
"tabsize" is the tab width to use for indent width calculations.
"skip_first_line" is a boolean indicating if the first line should
be skipped for calculating the indent width and for dedenting.
This is sometimes useful for docstrings and similar.
Same as dedent() except operates on a sequence of lines. Note: the
lines list is modified **in-place**.
"""
DEBUG = False
if DEBUG:
print "dedent: dedent(..., tabsize=%d, skip_first_line=%r)"\
% (tabsize, skip_first_line)
indents = []
margin = None
for i, line in enumerate(lines):
if i == 0 and skip_first_line:
continue
indent = 0
for ch in line:
if ch == ' ':
indent += 1
elif ch == '\t':
indent += tabsize - (indent % tabsize)
elif ch in '\r\n':
continue # skip all-whitespace lines
else:
break
else:
continue # skip all-whitespace lines
if DEBUG:
print "dedent: indent=%d: %r" % (indent, line)
if margin is None:
margin = indent
else:
margin = min(margin, indent)
if DEBUG:
print "dedent: margin=%r" % margin
if margin is not None and margin > 0:
for i, line in enumerate(lines):
if i == 0 and skip_first_line:
continue
removed = 0
for j, ch in enumerate(line):
if ch == ' ':
removed += 1
elif ch == '\t':
removed += tabsize - (removed % tabsize)
elif ch in '\r\n':
if DEBUG:
print "dedent: %r: EOL -> strip up to EOL" % line
lines[i] = lines[i][j:]
break
else:
raise ValueError("unexpected non-whitespace char %r in "
"line %r while removing %d-space margin"
% (ch, line, margin))
if DEBUG:
print "dedent: %r: %r -> removed %d/%d"\
% (line, ch, removed, margin)
if removed == margin:
lines[i] = lines[i][j+1:]
break
elif removed > margin:
lines[i] = ' '*(removed-margin) + lines[i][j+1:]
break
else:
if removed:
lines[i] = lines[i][removed:]
return lines
def dedent(text, tabsize=8, skip_first_line=False):
    """dedent(text, tabsize=8, skip_first_line=False) -> dedented text

        "text" is the text to dedent.
        "tabsize" is the tab width to use for indent width calculations.
        "skip_first_line" is a boolean indicating if the first line should
            be skipped for calculating the indent width and for dedenting.
            This is sometimes useful for docstrings and similar.

    textwrap.dedent(s), but don't expand tabs to spaces
    """
    # _dedentlines mutates and returns the same list, so this is one pass.
    return ''.join(_dedentlines(text.splitlines(True),
                                tabsize=tabsize,
                                skip_first_line=skip_first_line))
# Recipe: indent (0.2.1) in C:\trentm\tm\recipes\cookbook
def indent(s, width=4, skip_first_line=False):
    """indent(s, [width=4]) -> 's' indented by 'width' spaces

    The optional "skip_first_line" argument is a boolean (default False)
    indicating if the first line should NOT be indented.
    """
    prefix = ' ' * width
    # Joining with the prefix indents every line but the first; prepend the
    # prefix too unless the first line is to be left alone.
    joined = prefix.join(s.splitlines(True))
    return joined if skip_first_line else prefix + joined
def walk2(top, topdown=True, onerror=None, followlinks=False,
          ondecodeerror=None):
    """A version of `os.walk` that adds support for handling errors for
    files that cannot be decoded with the default encoding. (See bug 82268.)

    By default `UnicodeDecodeError`s from the os.listdir() call are
    ignored. If optional arg 'ondecodeerror' is specified, it should be a
    function; it will be called with one argument, the `UnicodeDecodeError`
    instance. It can report the error to continue with the walk, or
    raise the exception to abort the walk.
    """
    from os.path import join, isdir, islink

    # We may not have read permission for top, in which case we can't
    # get a list of the files the directory contains. os.path.walk
    # always suppressed the exception then, rather than blow up for a
    # minor reason when (say) a thousand readable directories are still
    # left to visit. That logic is copied here.
    try:
        names = os.listdir(top)
    except os.error as err:  # 'as' form: valid on Python 2.6+ and Python 3
        if onerror is not None:
            onerror(err)
        return

    dirs, nondirs = [], []
    for name in names:
        try:
            if isdir(join(top, name)):
                dirs.append(name)
            else:
                nondirs.append(name)
        except UnicodeDecodeError as err:
            if ondecodeerror is not None:
                ondecodeerror(err)

    if topdown:
        yield top, dirs, nondirs
    for name in dirs:
        path = join(top, name)
        if followlinks or not islink(path):
            # Bug fix: propagate ondecodeerror into the recursion (it was
            # previously dropped, so sub-directories ignored the handler).
            for x in walk2(path, topdown, onerror, followlinks,
                           ondecodeerror):
                yield x
    if not topdown:
        yield top, dirs, nondirs
# Decorators useful for timing and profiling specific functions.
#
# timeit usage:
# Decorate the desired function and you'll get a print for how long
# each call to the function took.
#
# hotspotit usage:
# 1. decorate the desired function
# 2. run your code
# 3. run:
# python .../codeintel/support/show_stats.py .../<funcname>.prof
#
def timeit(func):
    """Decorator that prints how long each call to *func* took.

    The wrapped function's return value and exceptions pass through
    unchanged; the timing line is printed even if *func* raises.
    """
    # time.clock has better resolution on Windows; time.time elsewhere.
    # NOTE(review): time.clock was removed in Python 3.8, so the win32
    # branch would need time.perf_counter there.
    clock = (sys.platform == "win32" and time.clock or time.time)

    def wrapper(*args, **kw):
        start_time = clock()
        try:
            return func(*args, **kw)
        finally:
            total_time = clock() - start_time
            # func.__name__ (not the Python-2-only func.func_name) and the
            # print() call form work identically on Python 2 and 3.
            print("%s took %.3fs" % (func.__name__, total_time))
    return wrapper
def hotshotit(func):
    """Decorator: profile each call to *func* with hotshot, writing
    cumulative data to "<funcname>.prof".

    One profiler per function name is kept in the module-global
    hotshotProfilers dict so repeated calls accumulate in the same file.

    NOTE(review): the hotshot module and func.func_name are Python-2-only.
    """
    def wrapper(*args, **kw):
        import hotshot
        global hotshotProfilers
        prof_name = func.func_name+".prof"
        profiler = hotshotProfilers.get(prof_name)
        if profiler is None:
            profiler = hotshot.Profile(prof_name)
            hotshotProfilers[prof_name] = profiler
        return profiler.runcall(func, *args, **kw)
    return wrapper
# Lazily-created singleton holding the shared cProfile.Profile.
_koCProfiler = None


def getProfiler():
    """Return the process-wide cProfile.Profile, creating it on first use.

    Profile stats are printed (slowest first) at shutdown: via an
    'xpcom-shutdown' observer when running under XPCOM (Komodo),
    otherwise via atexit.
    """
    global _koCProfiler
    if _koCProfiler is None:
        class _KoCProfileManager(object):
            def __init__(self):
                import atexit
                import cProfile
                from codeintel2.common import _xpcom_
                self.prof = cProfile.Profile()
                if _xpcom_:
                    from xpcom import components
                    # Register as an nsIObserver so stats are dumped on
                    # XPCOM shutdown rather than interpreter exit.
                    _KoCProfileManager._com_interfaces_ = [
                        components.interfaces.nsIObserver]
                    obsSvc = components.classes["@mozilla.org/observer-service;1"].\
                        getService(
                            components.interfaces.nsIObserverService)
                    obsSvc.addObserver(self, 'xpcom-shutdown', False)
                else:
                    atexit.register(self.atexit_handler)

            def atexit_handler(self):
                # Dump cumulative profile stats, sorted by internal time.
                self.prof.print_stats(sort="time")

            def observe(self, subject, topic, data):
                if topic == "xpcom-shutdown":
                    self.atexit_handler()
        _koCProfiler = _KoCProfileManager()
    return _koCProfiler.prof
def profile_method(func):
    """Decorator: execute *func* under the shared module profiler
    (see getProfiler())."""
    def profiled_call(*args, **kwargs):
        return getProfiler().runcall(func, *args, **kwargs)
    return profiled_call
# Utility functions to perform sorting the same way as scintilla does it
# for the code-completion list.
def OrdPunctLast(value):
    """Map *value* to a sort key that orders punctuation after letters,
    case-insensitively (mirrors Scintilla's completion-list ordering).

    ASCII punctuation in '!'..'/' and ':'..'@' is shifted to start at '['
    (i.e. past 'Z'); everything else is just uppercased.
    """
    chars = []
    for ch in value.upper():
        code = ord(ch)
        if 0x21 <= code <= 0x2F:          # '!' .. '/'
            chars.append(chr(code - ord("!") + ord('[')))
        elif 0x3A <= code <= 0x40:        # ':' .. '@'
            chars.append(chr(code - ord(":") + ord('[')))
        else:
            chars.append(ch)
    return "".join(chars)
def CompareNPunctLast(value1, value2):
    """cmp()-style comparison using OrdPunctLast keys, so punctuation
    sorts after letters, case-insensitively.

    NOTE(review): relies on the Python-2-only builtin cmp().
    """
    # value 1 is smaller, return negative
    # value 1 is equal, return 0
    # value 1 is larger, return positive
    return cmp(OrdPunctLast(value1), OrdPunctLast(value2))
# Utility function to make a lookup dictionary
def make_short_name_dict(names, length=3):
    """Map each name's first *length* characters to the list of names
    sharing that prefix. Names shorter than *length* are skipped.

    Each bucket is sorted Scintilla-style (punctuation last) via
    OrdPunctLast.
    """
    outdict = {}
    for name in names:
        if len(name) >= length:
            shortname = name[:length]
            l = outdict.get(shortname)
            if not l:
                outdict[shortname] = [name]
            else:
                l.append(name)
    # pprint(outdict)
    for values in outdict.values():
        # key= form replaces the Python-2-only positional cmp argument
        # (was: values.sort(CompareNPunctLast)). Ordering is identical
        # because CompareNPunctLast compares OrdPunctLast keys.
        values.sort(key=OrdPunctLast)
    return outdict
def makePerformantLogger(logger):
    """Replaces the info() and debug() methods with dummy methods.

    Assumes that the logging level does not change during runtime.
    """
    # Bug fix: _log_ignore was previously defined inside the INFO branch
    # only, so when INFO was enabled but DEBUG was not (the common case)
    # the DEBUG branch raised NameError. Define it unconditionally, and
    # as a plain *args sink: it is assigned as an instance attribute, so
    # it is called unbound (no self is passed).
    def _log_ignore(*args, **kwargs):
        pass
    if not logger.isEnabledFor(logging.INFO):
        logger.info = _log_ignore
    if not logger.isEnabledFor(logging.DEBUG):
        logger.debug = _log_ignore
#---- mainline self-test

if __name__ == "__main__":
    # Run the doctest examples embedded in this module's docstrings.
    import doctest
    doctest.testmod()
|
chapmanb/cloudbiolinux
|
fabfile.py
|
"""Main Fabric deployment file for CloudBioLinux distribution.
This installs a standard set of useful biological applications on a remote
server. It is designed for bootstrapping a machine from scratch, as with new
Amazon EC2 instances.
Usage:
fab -H hostname -i private_key_file install_biolinux
which will call into the 'install_biolinux' method below. See the README for
more examples. hostname can be a named host in ~/.ssh/config
Requires:
Fabric http://docs.fabfile.org
PyYAML http://pyyaml.org/wiki/PyYAMLDocumentation
"""
import os
import sys
from datetime import datetime
from fabric.api import *
from fabric.contrib.files import *
import yaml
# use local cloudbio directory
for to_remove in [p for p in sys.path if p.find("cloudbiolinux-") > 0]:
sys.path.remove(to_remove)
sys.path.append(os.path.dirname(__file__))
import cloudbio
from cloudbio import libraries
from cloudbio.utils import _setup_logging, _configure_fabric_environment
from cloudbio.cloudman import _cleanup_ec2, _configure_cloudman
from cloudbio.cloudbiolinux import _cleanup_space, _freenx_scripts
from cloudbio.custom import shared
from cloudbio.package.shared import _yaml_to_packages
from cloudbio.package import brew, conda
from cloudbio.package import (_configure_and_install_native_packages,
_connect_native_packages, _print_shell_exports)
from cloudbio.package.nix import _setup_nix_sources, _nix_packages
from cloudbio.flavor.config import get_config_file
from cloudbio.config_management.puppet import _puppet_provision
from cloudbio.config_management.chef import _chef_provision, chef, _configure_chef
# ### Shared installation targets for all platforms

def install_biolinux(target=None, flavor=None):
    """Main entry point for installing BioLinux on a remote server.

    `flavor` allows customization of CloudBioLinux behavior. It can either
    be a flavor name that maps to a corresponding directory in contrib/flavor
    or the path to a custom directory. This can contain:

      - alternative package lists (main.yaml, packages.yaml, custom.yaml)
      - custom python code (nameflavor.py) that hooks into the build machinery

    `target` allows running only particular parts of the build process. Valid choices are:

      - packages     Install distro packages
      - custom       Install custom packages
      - chef_recipes Provision chef recipes
      - libraries    Install programming language libraries
      - post_install Setup CloudMan, FreeNX and other system services
      - cleanup      Remove downloaded files and prepare images for AMI builds
    """
    _setup_logging(env)
    time_start = _print_time_stats("Config", "start")
    _check_fabric_version()
    # Honor ~/.ssh/config host entries when a config file is present.
    if env.ssh_config_path and os.path.isfile(os.path.expanduser(env.ssh_config_path)):
        env.use_ssh_config = True
    # The distribution check can be skipped for targets that do not touch
    # system packages.
    _configure_fabric_environment(env, flavor,
                                  ignore_distcheck=(target is not None
                                                    and target in ["libraries", "custom"]))
    env.logger.debug("Target is '%s'" % target)
    env.logger.debug("Flavor is '%s'" % flavor)
    _perform_install(target, flavor)
    _print_time_stats("Config", "end", time_start)
    # Isolated (non-sudo) installs need shell exports printed so the user
    # can activate the installation.
    if hasattr(env, "keep_isolated") and env.keep_isolated:
        _print_shell_exports(env)
def _perform_install(target=None, flavor=None, more_custom_add=None):
    """
    Once CBL/fabric environment is setup, this method actually
    runs the required installation procedures.

    See `install_biolinux` for full details on arguments
    `target` and `flavor`. `more_custom_add` is an optional
    {group: [package, ...]} mapping merged into the config's custom
    additions.
    """
    pkg_install, lib_install, custom_ignore, custom_add = _read_main_config()
    # Merge caller-supplied custom packages into the config's custom_add map.
    if more_custom_add:
        if custom_add is None:
            custom_add = {}
        # NOTE(review): iteritems() is Python-2-only.
        for k, vs in more_custom_add.iteritems():
            if k in custom_add:
                custom_add[k].extend(vs)
            else:
                custom_add[k] = vs
    if target is None or target == "packages":
        # keep_isolated is configured as a string flag; normalize to bool.
        env.keep_isolated = getattr(env, "keep_isolated", "false").lower() in ["true", "yes"]
        # Only touch system information if we're not an isolated installation
        if not env.keep_isolated:
            # can only install native packages if we have sudo access or are root
            if env.use_sudo or env.safe_run_output("whoami").strip() == "root":
                _configure_and_install_native_packages(env, pkg_install)
            else:
                _connect_native_packages(env, pkg_install, lib_install)
        if env.nixpkgs:  # ./doc/nixpkgs.md
            _setup_nix_sources()
            _nix_packages(pkg_install)
    if target is None or target == "custom":
        _custom_installs(pkg_install, custom_ignore, custom_add)
    if target is None or target == "chef_recipes":
        _provision_chef_recipes(pkg_install, custom_ignore)
    if target is None or target == "puppet_classes":
        _provision_puppet_classes(pkg_install, custom_ignore)
    if target is None or target == "brew":
        install_brew(flavor=flavor, automated=True)
    if target is None or target == "conda":
        install_conda(flavor=flavor, automated=True)
    if target is None or target == "libraries":
        _do_library_installs(lib_install)
    if target is None or target == "post_install":
        env.flavor.post_install()
        # EC2-image builds additionally get FreeNX and (when the cloudman
        # package group is selected) CloudMan configuration.
        if "is_ec2_image" in env and env.is_ec2_image.upper() in ["TRUE", "YES"]:
            _freenx_scripts(env)
        if pkg_install is not None and 'cloudman' in pkg_install:
            _configure_cloudman(env)
    if target is None or target == "cleanup":
        if env.use_sudo:
            _cleanup_space(env)
        if "is_ec2_image" in env and env.is_ec2_image.upper() in ["TRUE", "YES"]:
            _cleanup_ec2(env)
def _print_time_stats(action, event, prev_time=None):
    """Log a timing event during configuration and return its timestamp.

    :type action: string
    :param action: Indicates type of action (eg, Config, Lib install, Pkg install)
    :type event: string
    :param event: The monitoring event (eg, start, stop)
    :type prev_time: datetime
    :param prev_time: Timestamp of an earlier event; when supplied, the
                      elapsed duration since then is appended to the log line.
    :rtype: datetime
    :return: A datetime timestamp of when the method was called
    """
    now = datetime.utcnow()
    message = "{0} {1} time: {2}".format(action, event, now)
    if prev_time:
        message += "; duration: {0}".format(str(now - prev_time))
    env.logger.info(message)
    return now
def _check_fabric_version():
    """Abort unless the installed fabric is at least major version 1."""
    major_version = int(env.version.split(".")[0])
    if major_version < 1:
        raise NotImplementedError("Please install fabric version 1 or higher")
def _custom_installs(to_install, ignore=None, add=None):
    """Install custom (non package-manager) programs listed in custom.yaml.

    :param to_install: groups to install, as read from the main configuration
    :param ignore: optional collection of package names to skip
    :param add: optional dict of {group: [package, ...]} to install in
                addition to the configured ones
    """
    # Test env.local_install first so we skip the remote existence probe
    # entirely when no local install directory is configured.
    if env.local_install and not env.safe_exists(env.local_install):
        env.safe_run("mkdir -p %s" % env.local_install)
    pkg_config = get_config_file(env, "custom.yaml").base
    packages, pkg_to_group = _yaml_to_packages(pkg_config, to_install)
    packages = [p for p in packages if ignore is None or p not in ignore]
    if add is not None:
        # .items() instead of .iteritems() so this also runs on Python 3
        for key, vals in add.items():
            for v in vals:
                pkg_to_group[v] = key
                packages.append(v)
    for p in env.flavor.rewrite_config_items("custom", packages):
        install_custom(p, True, pkg_to_group)
def _provision_chef_recipes(to_install, ignore=None):
    """
    Much like _custom_installs, read config file, determine what to install,
    and install it.
    """
    pkg_config = get_config_file(env, "chef_recipes.yaml").base
    packages, _ = _yaml_to_packages(pkg_config, to_install)
    if ignore is not None:
        packages = [p for p in packages if p not in ignore]
    recipes = list(env.flavor.rewrite_config_items("chef_recipes", packages))
    # Don't bother running chef if nothing to configure
    if recipes:
        install_chef_recipe(recipes, True)
def _provision_puppet_classes(to_install, ignore=None):
    """
    Much like _custom_installs, read config file, determine what to install,
    and install it.
    """
    pkg_config = get_config_file(env, "puppet_classes.yaml").base
    packages, _ = _yaml_to_packages(pkg_config, to_install)
    packages = [p for p in packages if ignore is None or p not in ignore]
    classes = [recipe for recipe in env.flavor.rewrite_config_items("puppet_classes", packages)]
    if classes:  # Don't bother running puppet if nothing to configure
        install_puppet_class(classes, True)
def install_chef_recipe(recipe, automated=False, flavor=None):
    """Install one or more chef recipes by name.

    Usage: fab [-i key] [-u user] -H host install_chef_recipe:recipe

    :type recipe: string or list
    :param recipe: A single chef recipe name, or a list of recipe names, to
                   add to the chef run list and provision.
    :type automated: bool
    :param automated: If set to True, the environment is not loaded.
    :type flavor: string
    :param flavor: Flavor passed to the fabric environment setup when
                   ``automated`` is False.
    """
    _setup_logging(env)
    if not automated:
        _configure_fabric_environment(env, flavor)
    time_start = _print_time_stats("Chef provision for recipe(s) '{0}'".format(recipe), "start")
    _configure_chef(env, chef)
    # normalize to a list so a single recipe name can be passed as a string
    recipes = recipe if isinstance(recipe, list) else [recipe]
    for recipe_to_add in recipes:
        chef.add_recipe(recipe_to_add)
    _chef_provision(env, recipes)
    _print_time_stats("Chef provision for recipe(s) '%s'" % recipe, "end", time_start)
def install_puppet_class(classes, automated=False, flavor=None):
    """Install one or more puppet classes by name.

    Usage: fab [-i key] [-u user] -H host install_puppet_class:class

    :type classes: string or list
    :param classes: A single puppet class name, or a list of class names,
                    to provision.
    :type automated: bool
    :param automated: If set to True, the environment is not loaded.
    :type flavor: string
    :param flavor: Flavor passed to the fabric environment setup when
                   ``automated`` is False.
    """
    _setup_logging(env)
    if not automated:
        _configure_fabric_environment(env, flavor)
    time_start = _print_time_stats("Puppet provision for class(es) '{0}'".format(classes), "start")
    # normalize to a list so a single class name can be passed as a string
    classes = classes if isinstance(classes, list) else [classes]
    _puppet_provision(env, classes)
    # log message typo fixed: "classes(s)" -> "class(es)" to match the start message
    _print_time_stats("Puppet provision for class(es) '%s'" % classes, "end", time_start)
def install_custom(p, automated=False, pkg_to_group=None, flavor=None):
    """
    Install a single custom program or package by name.

    This method fetches program name from ``config/custom.yaml`` and delegates
    to a method in ``custom/*name*.py`` to proceed with the installation.
    Alternatively, if a program install method is defined in the appropriate
    package, it will be called directly (see param ``p``).

    Usage: fab [-i key] [-u user] -H host install_custom:program_name

    :type p: string
    :param p: A name of the custom program to install. This has to be either a name
              that is listed in ``custom.yaml`` as a subordinate to a group name or a
              program name whose install method is defined in either ``cloudbio`` or
              ``custom`` packages
              (e.g., ``cloudbio/custom/cloudman.py -> install_cloudman``).
    :type automated: bool
    :param automated: If set to True, the environment is not loaded and reading of
                      the ``custom.yaml`` is skipped.
    :type pkg_to_group: dict
    :param pkg_to_group: Mapping of package name to its ``custom.yaml`` group;
                         only used when ``automated`` is True (otherwise it is
                         re-read from ``custom.yaml``).
    :type flavor: string
    :param flavor: Flavor passed to the fabric environment setup when
                   ``automated`` is False.
    """
    p = p.lower()  # All packages listed in custom.yaml are in lower case
    if not automated:
        _setup_logging(env)
        _configure_fabric_environment(env, flavor, ignore_distcheck=True)
        pkg_config = get_config_file(env, "custom.yaml").base
        packages, pkg_to_group = _yaml_to_packages(pkg_config, None)
    time_start = _print_time_stats("Custom install for '{0}'".format(p), "start")
    fn = _custom_install_function(env, p, pkg_to_group)
    fn(env)
    ## TODO: Replace the previous 4 lines with the following one, barring
    ## objections. Slightly different behavior because pkg_to_group will be
    ## loaded regardless of automated if it is None, but IMO this shouldn't
    ## matter because the following steps look like they would fail if
    ## automated is True and pkg_to_group is None.
    # _install_custom(p, pkg_to_group)
    _print_time_stats("Custom install for '%s'" % p, "end", time_start)
def _install_custom(p, pkg_to_group=None):
    """Resolve and run the custom install function for program `p`."""
    if pkg_to_group is None:
        config_path = get_config_file(env, "custom.yaml").base
        _, pkg_to_group = _yaml_to_packages(config_path, None)
    install_fn = _custom_install_function(env, p, pkg_to_group)
    install_fn(env)
def install_brew(p=None, version=None, flavor=None, automated=False):
    """Top level access to homebrew/linuxbrew packages.

    p is a package name to install, or all configured packages if not specified.
    """
    if not automated:
        _setup_logging(env)
        _configure_fabric_environment(env, flavor, ignore_distcheck=True)
    if p is None:
        # install everything the main configuration asks for
        pkg_install = _read_main_config()[0]
        brew.install_packages(env, to_install=pkg_install)
    else:
        if version:
            p = "%s==%s" % (p, version)
        brew.install_packages(env, packages=[p])
def install_conda(p=None, flavor=None, automated=False):
    """Install conda packages: a single package `p`, or every configured one."""
    if not automated:
        _setup_logging(env)
        _configure_fabric_environment(env, flavor, ignore_distcheck=True)
    if p is None:
        pkg_install = _read_main_config()[0]
        conda.install_packages(env, to_install=pkg_install)
    else:
        conda.install_packages(env, packages=[p])
def _custom_install_function(env, p, pkg_to_group):
    """
    Find custom install function to execute based on package name to
    pkg_to_group dict.

    Returns the ``install_<p>`` callable from ``cloudbio.custom.<group>``;
    raises ImportError when the module or function is missing.
    """
    # Allow direct calling of a program install method, even if the program
    # is not listed in the custom list (ie, not contained as a key value in
    # pkg_to_group). For an example, see 'install_cloudman' or use p=cloudman.
    mod_name = pkg_to_group[p] if p in pkg_to_group else p
    try:
        env.logger.debug("Importing module cloudbio.custom.%s" % mod_name)
        mod = __import__("cloudbio.custom.%s" % mod_name,
                         fromlist=["cloudbio", "custom"])
    except ImportError:
        # Use mod_name rather than pkg_to_group[p]: for direct calls with a
        # program not listed in pkg_to_group, indexing would raise KeyError
        # and mask the real error.
        raise ImportError("Need to write module cloudbio.custom.%s" % mod_name)
    replace_chars = ["-"]
    try:
        for to_replace in replace_chars:
            p = p.replace(to_replace, "_")
        env.logger.debug("Looking for custom install function %s.install_%s"
                         % (mod.__name__, p))
        fn = getattr(mod, "install_%s" % p)
    except AttributeError:
        # Same KeyError hazard as above: report the resolved module name.
        raise ImportError("Need to write a install_%s function in custom.%s"
                          % (p, mod_name))
    return fn
def _read_main_config():
    """Pull a list of groups to install based on our main configuration YAML.

    Reads 'main.yaml' and returns packages and libraries
    """
    yaml_file = get_config_file(env, "main.yaml").base
    with open(yaml_file) as in_handle:
        full_data = yaml.safe_load(in_handle)
    packages = env.flavor.rewrite_config_items(
        "main_packages", full_data.get('packages', []))
    libraries = full_data.get('libraries', [])
    custom_ignore = full_data.get('custom_ignore', [])
    custom_add = full_data.get("custom_additional")
    # normalize explicit nulls in the YAML to empty lists
    packages = packages if packages is not None else []
    libraries = libraries if libraries is not None else []
    custom_ignore = custom_ignore if custom_ignore is not None else []
    env.logger.info("Meta-package information from {2}\n- Packages: {0}\n- Libraries: "
                    "{1}".format(",".join(packages), ",".join(libraries), yaml_file))
    return packages, sorted(libraries), custom_ignore, custom_add
# ### Library specific installation code
def _python_library_installer(config):
    """Install python specific libraries using pip, conda and easy_install.

    Handles using isolated anaconda environments.
    """
    if shared._is_anaconda(env):
        conda_bin = shared._conda_cmd(env)
        for pname in env.flavor.rewrite_config_items("python", config.get("conda", [])):
            env.safe_run("{0} install --yes {1}".format(conda_bin, pname))
        # inside an anaconda env, no sudo needed for pypi installs below
        cmd = env.safe_run
        with settings(warn_only=True):
            # easy_install lives next to the conda binary inside the env
            cmd("%s -U distribute" % os.path.join(os.path.dirname(conda_bin), "easy_install"))
    else:
        pip_bin = shared._pip_cmd(env)
        ei_bin = pip_bin.replace("pip", "easy_install")
        env.safe_sudo("%s -U pip" % ei_bin)
        with settings(warn_only=True):
            env.safe_sudo("%s -U distribute" % ei_bin)
        # system-wide python: pypi installs below go through sudo
        cmd = env.safe_sudo
    for pname in env.flavor.rewrite_config_items("python", config['pypi']):
        cmd("{0} install --upgrade {1} --allow-unverified {1} --allow-external {1}".format(shared._pip_cmd(env), pname))  # fixes problem with packages not being in pypi
def _ruby_library_installer(config):
    """Install ruby specific gems.
    """
    # optional suffix for versioned gem binaries (eg "gem1.9")
    gem_ext = getattr(env, "ruby_version_ext", "")
    def _cur_gems():
        # List the names of currently installed gems, one per line.
        with settings(
                hide('warnings', 'running', 'stdout', 'stderr')):
            gem_info = env.safe_run_output("gem%s list --no-versions" % gem_ext)
        return [l.rstrip("\r") for l in gem_info.split("\n") if l.rstrip("\r")]
    installed = _cur_gems()
    for gem in env.flavor.rewrite_config_items("ruby", config['gems']):
        # update current gems only to check for new installs
        if gem not in installed:
            installed = _cur_gems()
        if gem in installed:
            env.safe_sudo("gem%s update %s" % (gem_ext, gem))
        else:
            env.safe_sudo("gem%s install %s" % (gem_ext, gem))
def _perl_library_installer(config):
    """Install perl libraries from CPAN with cpanminus.
    """
    # bootstrap cpanm into the system install prefix first
    with shared._make_tmp_dir() as tmp_dir:
        with cd(tmp_dir):
            env.safe_run("wget --no-check-certificate -O cpanm "
                         "https://raw.github.com/miyagawa/cpanminus/master/cpanm")
            env.safe_run("chmod a+rwx cpanm")
            env.safe_sudo("mv cpanm %s/bin" % env.system_install)
    sudo_str = "--sudo" if env.use_sudo else ""
    for lib in env.flavor.rewrite_config_items("perl", config['cpan']):
        # Need to hack stdin because of some problem with cpanminus script that
        # causes fabric to hang
        # http://agiletesting.blogspot.com/2010/03/getting-past-hung-remote-processes-in.html
        env.safe_run("cpanm %s --skip-installed --notest %s < /dev/null" % (sudo_str, lib))
def _haskell_library_installer(config):
    """Install haskell libraries using cabal.
    """
    # env.safe_run for consistency with every other remote call in this file
    # (the original used fabric's bare run here).
    env.safe_run("cabal update")
    # loop-invariant: compute the sudo flag once
    sudo_str = "--root-cmd=sudo" if env.use_sudo else ""
    for lib in config["cabal"]:
        env.safe_run("cabal install %s --global %s" % (sudo_str, lib))
# Dispatch table: library group name (as listed in main.yaml and matching a
# per-language "<name>.yaml" config) -> installer function for that language.
lib_installers = {
    "r-libs" : libraries.r_library_installer,
    "python-libs" : _python_library_installer,
    "ruby-libs" : _ruby_library_installer,
    "perl-libs" : _perl_library_installer,
    "haskell-libs": _haskell_library_installer,
}
def install_libraries(language):
    """High level target: install the configured libraries for one language.

    `language` is the bare name, eg "python" or "ruby"; the matching
    "<language>-libs" group is installed.
    """
    _setup_logging(env)
    _check_fabric_version()
    _configure_fabric_environment(env, ignore_distcheck=True)
    _do_library_installs(["{0}-libs".format(language)])
def _do_library_installs(to_install):
    """Run the configured installer for each library group in `to_install`."""
    for group in to_install:
        config_path = get_config_file(env, "%s.yaml" % group).base
        with open(config_path) as handle:
            group_config = yaml.safe_load(handle)
        lib_installers[group](group_config)
|
inclement/kivy
|
kivy/core/window/window_pygame.py
|
'''
Window Pygame: windowing provider based on Pygame
.. warning::
Pygame has been deprecated and will be removed in the release after Kivy
1.11.0.
'''
__all__ = ('WindowPygame', )
# fail early if possible
import pygame
from kivy.compat import PY2
from kivy.core.window import WindowBase
from kivy.core import CoreCriticalException
from os import environ
from os.path import exists, join
from kivy.config import Config
from kivy import kivy_data_dir
from kivy.base import ExceptionManager
from kivy.logger import Logger
from kivy.base import stopTouchApp, EventLoop
from kivy.utils import platform, deprecated
from kivy.resources import resource_find
try:
android = None
if platform == 'android':
import android
except ImportError:
pass
# late binding
glReadPixels = GL_RGBA = GL_UNSIGNED_BYTE = None
class WindowPygame(WindowBase):
    """Pygame-based window provider.

    Deprecated: pygame support is scheduled for removal after Kivy 1.11.0.
    """

    @deprecated(
        msg='Pygame has been deprecated and will be removed after 1.11.0')
    def __init__(self, *largs, **kwargs):
        super(WindowPygame, self).__init__(*largs, **kwargs)

    def create_window(self, *largs):
        # ensure the mouse is still not up after window creation, otherwise, we
        # have some weird bugs
        self.dispatch('on_mouse_up', 0, 0, 'all', [])

        # force display to show (available only for fullscreen)
        displayidx = Config.getint('graphics', 'display')
        if 'SDL_VIDEO_FULLSCREEN_HEAD' not in environ and displayidx != -1:
            environ['SDL_VIDEO_FULLSCREEN_HEAD'] = '%d' % displayidx

        # init some opengl, same as before.
        self.flags = pygame.HWSURFACE | pygame.OPENGL | pygame.DOUBLEBUF

        # right now, activate resizable window only on linux.
        # on window / macosx, the opengl context is lost, and we need to
        # reconstruct everything. Check #168 for a state of the work.
        if platform in ('linux', 'macosx', 'win') and \
                Config.getboolean('graphics', 'resizable'):
            self.flags |= pygame.RESIZABLE

        try:
            pygame.display.init()
        except pygame.error as e:
            # str(e) instead of the Python 2-only e.message attribute
            raise CoreCriticalException(str(e))

        multisamples = Config.getint('graphics', 'multisamples')

        if multisamples > 0:
            pygame.display.gl_set_attribute(pygame.GL_MULTISAMPLEBUFFERS, 1)
            pygame.display.gl_set_attribute(pygame.GL_MULTISAMPLESAMPLES,
                                            multisamples)
        pygame.display.gl_set_attribute(pygame.GL_DEPTH_SIZE, 16)
        pygame.display.gl_set_attribute(pygame.GL_STENCIL_SIZE, 1)
        pygame.display.set_caption(self.title)

        if self.position == 'auto':
            self._pos = None
        elif self.position == 'custom':
            self._pos = self.left, self.top
        else:
            raise ValueError('position token in configuration accept only '
                             '"auto" or "custom"')

        if self._fake_fullscreen:
            if not self.borderless:
                self.fullscreen = self._fake_fullscreen = False
            elif not self.fullscreen or self.fullscreen == 'auto':
                self.borderless = self._fake_fullscreen = False

        if self.fullscreen == 'fake':
            self.borderless = self._fake_fullscreen = True
            Logger.warning("The 'fake' fullscreen option has been "
                           "deprecated, use Window.borderless or the "
                           "borderless Config option instead.")

        if self.fullscreen == 'fake' or self.borderless:
            Logger.debug('WinPygame: Set window to borderless mode.')
            self.flags |= pygame.NOFRAME

            # If no position set in borderless mode, we always need
            # to set the position. So use 0, 0.
            if self._pos is None:
                self._pos = (0, 0)
            environ['SDL_VIDEO_WINDOW_POS'] = '%d,%d' % self._pos

        elif self.fullscreen in ('auto', True):
            Logger.debug('WinPygame: Set window to fullscreen mode')
            self.flags |= pygame.FULLSCREEN

        elif self._pos is not None:
            environ['SDL_VIDEO_WINDOW_POS'] = '%d,%d' % self._pos

        # never stay with a None pos, application using w.center will be fired.
        self._pos = (0, 0)

        # prepare keyboard
        repeat_delay = int(Config.get('kivy', 'keyboard_repeat_delay'))
        repeat_rate = float(Config.get('kivy', 'keyboard_repeat_rate'))
        pygame.key.set_repeat(repeat_delay, int(1000. / repeat_rate))

        # set window icon before calling set_mode
        try:
            filename_icon = self.icon or Config.get('kivy', 'window_icon')
            if filename_icon == '':
                logo_size = 32
                if platform == 'macosx':
                    logo_size = 512
                elif platform == 'win':
                    logo_size = 64
                filename_icon = 'kivy-icon-{}.png'.format(logo_size)
                filename_icon = resource_find(
                    join(kivy_data_dir, 'logo', filename_icon))
            self.set_icon(filename_icon)
        except:
            Logger.exception('Window: cannot set icon')

        # try to use mode with multisamples
        try:
            self._pygame_set_mode()
        except pygame.error as e:
            if multisamples:
                Logger.warning('WinPygame: Video: failed (multisamples=%d)' %
                               multisamples)
                Logger.warning('WinPygame: trying without antialiasing')
                pygame.display.gl_set_attribute(
                    pygame.GL_MULTISAMPLEBUFFERS, 0)
                pygame.display.gl_set_attribute(
                    pygame.GL_MULTISAMPLESAMPLES, 0)
                multisamples = 0
                try:
                    self._pygame_set_mode()
                except pygame.error as e:
                    # e.message exists only on Python 2; str(e) is portable
                    raise CoreCriticalException(str(e))
            else:
                raise CoreCriticalException(str(e))

        if pygame.RESIZABLE & self.flags:
            self._pygame_set_mode()

        info = pygame.display.Info()
        self._size = (info.current_w, info.current_h)
        # self.dispatch('on_resize', *self._size)

        # in order to debug futur issue with pygame/display, let's show
        # more debug output.
        Logger.debug('Window: Display driver ' + pygame.display.get_driver())
        Logger.debug('Window: Actual window size: %dx%d',
                     info.current_w, info.current_h)
        if platform != 'android':
            # unsupported platform, such as android that doesn't support
            # gl_get_attribute.
            Logger.debug(
                'Window: Actual color bits r%d g%d b%d a%d',
                pygame.display.gl_get_attribute(pygame.GL_RED_SIZE),
                pygame.display.gl_get_attribute(pygame.GL_GREEN_SIZE),
                pygame.display.gl_get_attribute(pygame.GL_BLUE_SIZE),
                pygame.display.gl_get_attribute(pygame.GL_ALPHA_SIZE))
            Logger.debug(
                'Window: Actual depth bits: %d',
                pygame.display.gl_get_attribute(pygame.GL_DEPTH_SIZE))
            Logger.debug(
                'Window: Actual stencil bits: %d',
                pygame.display.gl_get_attribute(pygame.GL_STENCIL_SIZE))
            Logger.debug(
                'Window: Actual multisampling samples: %d',
                pygame.display.gl_get_attribute(pygame.GL_MULTISAMPLESAMPLES))
        super(WindowPygame, self).create_window()

        # set mouse visibility
        self._set_cursor_state(self.show_cursor)

        # if we are on android platform, automatically create hooks
        if android:
            from kivy.support import install_android
            install_android()

    def close(self):
        pygame.display.quit()
        super(WindowPygame, self).close()

    def on_title(self, instance, value):
        if self.initialized:
            pygame.display.set_caption(self.title)

    def set_icon(self, filename):
        if not exists(filename):
            return False
        try:
            if platform == 'win':
                try:
                    if self._set_icon_win(filename):
                        return True
                except:
                    # fallback on standard loading then.
                    pass

            # for all others platform, or if the ico is not available, use the
            # default way to set it.
            self._set_icon_standard(filename)
            super(WindowPygame, self).set_icon(filename)
        except:
            Logger.exception('WinPygame: unable to set icon')

    def _set_icon_standard(self, filename):
        if PY2:
            try:
                im = pygame.image.load(filename)
            except UnicodeEncodeError:
                im = pygame.image.load(filename.encode('utf8'))
        else:
            im = pygame.image.load(filename)
        if im is None:
            raise Exception('Unable to load window icon (not found)')
        pygame.display.set_icon(im)

    def _set_icon_win(self, filename):
        # ensure the window ico is ended by ico
        if not filename.endswith('.ico'):
            filename = '{}.ico'.format(filename.rsplit('.', 1)[0])
        if not exists(filename):
            return False

        import win32api
        import win32gui
        import win32con
        hwnd = pygame.display.get_wm_info()['window']
        icon_big = win32gui.LoadImage(
            None, filename, win32con.IMAGE_ICON,
            48, 48, win32con.LR_LOADFROMFILE)
        icon_small = win32gui.LoadImage(
            None, filename, win32con.IMAGE_ICON,
            16, 16, win32con.LR_LOADFROMFILE)
        win32api.SendMessage(
            hwnd, win32con.WM_SETICON, win32con.ICON_SMALL, icon_small)
        win32api.SendMessage(
            hwnd, win32con.WM_SETICON, win32con.ICON_BIG, icon_big)
        return True

    def _set_cursor_state(self, value):
        pygame.mouse.set_visible(value)

    def screenshot(self, *largs, **kwargs):
        global glReadPixels, GL_RGBA, GL_UNSIGNED_BYTE
        filename = super(WindowPygame, self).screenshot(*largs, **kwargs)
        if filename is None:
            return None
        if glReadPixels is None:
            # late binding of the GL symbols declared at module level
            from kivy.graphics.opengl import (glReadPixels, GL_RGBA,
                                              GL_UNSIGNED_BYTE)
        width, height = self.system_size
        data = glReadPixels(0, 0, width, height, GL_RGBA, GL_UNSIGNED_BYTE)
        data = bytes(bytearray(data))
        surface = pygame.image.fromstring(data, (width, height), 'RGBA', True)
        pygame.image.save(surface, filename)
        Logger.debug('Window: Screenshot saved at <%s>' % filename)
        return filename

    def flip(self):
        pygame.display.flip()
        super(WindowPygame, self).flip()

    @deprecated
    def toggle_fullscreen(self):
        if self.flags & pygame.FULLSCREEN:
            self.flags &= ~pygame.FULLSCREEN
        else:
            self.flags |= pygame.FULLSCREEN
        self._pygame_set_mode()

    def mainloop(self):
        for event in pygame.event.get():

            # kill application (SIG_TERM)
            if event.type == pygame.QUIT:
                if self.dispatch('on_request_close'):
                    continue
                EventLoop.quit = True
                self.close()

            # mouse move
            elif event.type == pygame.MOUSEMOTION:
                x, y = event.pos
                self.mouse_pos = x, self.system_size[1] - y
                # don't dispatch motion if no button are pressed
                if event.buttons == (0, 0, 0):
                    continue
                self._mouse_x = x
                self._mouse_y = y
                self._mouse_meta = self.modifiers
                self.dispatch('on_mouse_move', x, y, self.modifiers)

            # mouse action
            elif event.type in (pygame.MOUSEBUTTONDOWN,
                                pygame.MOUSEBUTTONUP):
                self._pygame_update_modifiers()
                x, y = event.pos
                btn = 'left'
                if event.button == 3:
                    btn = 'right'
                elif event.button == 2:
                    btn = 'middle'
                elif event.button == 4:
                    btn = 'scrolldown'
                elif event.button == 5:
                    btn = 'scrollup'
                elif event.button == 6:
                    btn = 'scrollright'
                elif event.button == 7:
                    btn = 'scrollleft'
                eventname = 'on_mouse_down'
                if event.type == pygame.MOUSEBUTTONUP:
                    eventname = 'on_mouse_up'
                self._mouse_x = x
                self._mouse_y = y
                self._mouse_meta = self.modifiers
                self._mouse_btn = btn
                self._mouse_down = eventname == 'on_mouse_down'
                self.dispatch(eventname, x, y, btn, self.modifiers)

            # joystick action
            elif event.type == pygame.JOYAXISMOTION:
                self.dispatch('on_joy_axis', event.joy, event.axis,
                              event.value)
            elif event.type == pygame.JOYHATMOTION:
                self.dispatch('on_joy_hat', event.joy, event.hat, event.value)
            elif event.type == pygame.JOYBALLMOTION:
                self.dispatch('on_joy_ball', event.joy, event.ballid,
                              event.rel[0], event.rel[1])
            elif event.type == pygame.JOYBUTTONDOWN:
                self.dispatch('on_joy_button_down', event.joy, event.button)
            elif event.type == pygame.JOYBUTTONUP:
                self.dispatch('on_joy_button_up', event.joy, event.button)

            # keyboard action
            elif event.type in (pygame.KEYDOWN, pygame.KEYUP):
                self._pygame_update_modifiers(event.mod)
                # atm, don't handle keyup
                if event.type == pygame.KEYUP:
                    self.dispatch('on_key_up', event.key,
                                  event.scancode)
                    continue

                # don't dispatch more key if down event is accepted
                if self.dispatch('on_key_down', event.key,
                                 event.scancode, event.unicode,
                                 self.modifiers):
                    continue
                self.dispatch('on_keyboard', event.key,
                              event.scancode, event.unicode,
                              self.modifiers)

            # video resize
            elif event.type == pygame.VIDEORESIZE:
                self._size = event.size
                self.update_viewport()

            elif event.type == pygame.VIDEOEXPOSE:
                self.canvas.ask_update()

            # ignored event
            elif event.type == pygame.ACTIVEEVENT:
                pass

            # drop file (pygame patch needed)
            elif event.type == pygame.USEREVENT and \
                    hasattr(pygame, 'USEREVENT_DROPFILE') and \
                    event.code == pygame.USEREVENT_DROPFILE:
                self.dispatch('on_dropfile', event.filename)

            '''
            # unhandled event !
            else:
                Logger.debug('WinPygame: Unhandled event %s' % str(event))
            '''

        # throttle the loop while the window is inactive
        if not pygame.display.get_active():
            pygame.time.wait(100)

    #
    # Pygame wrapper
    #
    def _pygame_set_mode(self, size=None):
        if size is None:
            size = self.size
        if self.fullscreen == 'auto':
            pygame.display.set_mode((0, 0), self.flags)
        else:
            pygame.display.set_mode(size, self.flags)

    def _pygame_update_modifiers(self, mods=None):
        # Available mod, from dir(pygame)
        # 'KMOD_ALT', 'KMOD_CAPS', 'KMOD_CTRL', 'KMOD_LALT',
        # 'KMOD_LCTRL', 'KMOD_LMETA', 'KMOD_LSHIFT', 'KMOD_META',
        # 'KMOD_MODE', 'KMOD_NONE'
        if mods is None:
            mods = pygame.key.get_mods()
        self._modifiers = []
        if mods & (pygame.KMOD_SHIFT | pygame.KMOD_LSHIFT):
            self._modifiers.append('shift')
        if mods & (pygame.KMOD_ALT | pygame.KMOD_LALT):
            self._modifiers.append('alt')
        if mods & (pygame.KMOD_CTRL | pygame.KMOD_LCTRL):
            self._modifiers.append('ctrl')
        if mods & (pygame.KMOD_META | pygame.KMOD_LMETA):
            self._modifiers.append('meta')

    def request_keyboard(self, callback, target, input_type='text'):
        keyboard = super(WindowPygame, self).request_keyboard(
            callback, target, input_type)
        if android and not self.allow_vkeyboard:
            android.show_keyboard(target, input_type)
        return keyboard

    def release_keyboard(self, *largs):
        super(WindowPygame, self).release_keyboard(*largs)
        if android:
            android.hide_keyboard()
        return True
|
yunojuno/django-onfido
|
onfido/settings.py
|
from os import getenv
from django.conf import settings
def _setting(key, default):
    """Return `key` from the environment, else Django settings, else default.

    The original passed `default` into getenv, so a truthy default (eg the
    True default for ONFIDO_LOG_EVENTS) made the Django settings lookup
    unreachable. Omitting it lets a missing/falsy env value fall through to
    `settings` before the hard-coded default applies.
    """
    return getenv(key) or getattr(settings, key, default)
# API key from environment by default
API_KEY = _setting("ONFIDO_API_KEY", None)

# Webhook token - see https://documentation.onfido.com/#webhooks
WEBHOOK_TOKEN = _setting("ONFIDO_WEBHOOK_TOKEN", None)
# token must be a bytestring for HMAC function to work
WEBHOOK_TOKEN = str.encode(WEBHOOK_TOKEN) if WEBHOOK_TOKEN else None

# Set to False to turn off event logging
LOG_EVENTS = _setting("ONFIDO_LOG_EVENTS", True)

# Set to True to bypass request verification (NOT RECOMMENDED)
TEST_MODE = _setting("ONFIDO_TEST_MODE", False)
def DEFAULT_REPORT_SCRUBBER(raw):
    """Remove breakdown and properties."""
    scrubbed = dict(raw)
    scrubbed.pop("breakdown", None)
    scrubbed.pop("properties", None)
    return scrubbed
def DEFAULT_APPLICANT_SCRUBBER(raw):
    """Remove all personal data."""
    safe_keys = ("id", "href", "created_at")
    return {key: value for key, value in raw.items() if key in safe_keys}
# functions used to scrub sensitive data from reports; either can be
# overridden by a callable configured in Django settings
scrub_report_data = (
    getattr(settings, "ONFIDO_REPORT_SCRUBBER", None) or DEFAULT_REPORT_SCRUBBER
)
scrub_applicant_data = (
    getattr(settings, "ONFIDO_APPLICANT_SCRUBBER", None) or DEFAULT_APPLICANT_SCRUBBER
)
|
Azure/azure-sdk-for-python
|
sdk/identity/azure-identity/azure/identity/_internal/aadclient_certificate.py
|
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
import base64
from typing import TYPE_CHECKING
from cryptography import x509
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import padding
from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey
from cryptography.hazmat.backends import default_backend
import six
if TYPE_CHECKING:
# pylint:disable=unused-import,ungrouped-imports
from typing import Optional
class AadClientCertificate(object):
    """Wraps 'cryptography' to provide the crypto operations AadClient requires for certificate authentication.

    :param bytes pem_bytes: bytes of a PEM-encoded certificate including the (RSA) private key
    :param bytes password: (optional) the certificate's password
    :raises ValueError: when the certificate's private key is not RSA
    """

    def __init__(self, pem_bytes, password=None):
        # type: (bytes, Optional[bytes]) -> None
        private_key = serialization.load_pem_private_key(pem_bytes, password=password, backend=default_backend())
        if not isinstance(private_key, RSAPrivateKey):
            raise ValueError("The certificate must have an RSA private key because RS256 is used for signing")
        self._private_key = private_key
        cert = x509.load_pem_x509_certificate(pem_bytes, default_backend())
        # SHA1 is used only to fingerprint the certificate, not for signing
        # ("nosec") -- presumably to match AAD's thumbprint format; confirm
        # against the service docs before changing the algorithm.
        fingerprint = cert.fingerprint(hashes.SHA1())  # nosec
        self._thumbprint = six.ensure_str(base64.urlsafe_b64encode(fingerprint), encoding="utf-8")

    @property
    def thumbprint(self):
        # type: () -> str
        """The certificate's SHA1 thumbprint as a base64url-encoded string"""
        return self._thumbprint

    def sign(self, plaintext):
        # type: (bytes) -> bytes
        """Sign bytes using RS256"""
        return self._private_key.sign(plaintext, padding.PKCS1v15(), hashes.SHA256())
|
NedYork/viper
|
tests/parser/syntax/test_public.py
|
import pytest
from viper import compiler
valid_list = [
"""
x: public(num)
""",
"""
x: public(num(wei / sec))
y: public(num(wei / sec ** 2))
z: public(num(1 / sec))
def foo() -> num(sec ** 2):
return self.x / self.y / self.z
"""
]
@pytest.mark.parametrize('good_code', valid_list)
def test_public_success(good_code):
    # every snippet in valid_list must compile and yield a non-None artifact
    assert compiler.compile(good_code) is not None
|
datamade/parserator
|
parserator/manual_labeling.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import absolute_import
from builtins import zip
from builtins import str
from builtins import range
from lxml import etree
import sys
import os.path
from . import data_prep_utils
import re
import csv
from argparse import ArgumentParser
from collections import OrderedDict
import io
if sys.version < '3' :
from backports import csv
else :
import csv
def consoleLabel(raw_strings, labels, module):
    """Interactively confirm/correct the module's predicted tags per string.

    For each raw string, show the parser's predictions and let the user
    accept (y), correct token-by-token (n), skip (s) or finish (f).

    Returns (tagged_strings, strings_left_to_tag): the set of tagged
    (token, tag) tuples and the strings the user never tagged.
    """
    print('\nStart console labeling!\n')
    valid_input_tags = OrderedDict([(str(i), label) for i, label in enumerate(labels)])
    printHelp(valid_input_tags)
    valid_responses = ['y', 'n', 's', 'f', '']
    finished = False
    strings_left_to_tag = raw_strings.copy()
    total_strings = len(raw_strings)
    tagged_strings = set([])
    for i, raw_sequence in enumerate(raw_strings, 1):
        if not finished:
            print('\n(%s of %s)' % (i, total_strings))
            print('-'*50)
            print('STRING: %s' %raw_sequence)
            preds = module.parse(raw_sequence)
            user_input = None
            while user_input not in valid_responses :
                friendly_repr = [(token[0].strip(), token[1]) for token in preds]
                print_table(friendly_repr)
                sys.stderr.write('Is this correct? (y)es / (n)o / (s)kip / (f)inish tagging / (h)elp\n')
                user_input = sys.stdin.readline().strip()
                if user_input =='y':
                    tagged_strings.add(tuple(preds))
                    strings_left_to_tag.remove(raw_sequence)
                elif user_input =='n':
                    corrected_string = manualTagging(preds,
                                                     valid_input_tags)
                    tagged_strings.add(tuple(corrected_string))
                    strings_left_to_tag.remove(raw_sequence)
                elif user_input in ('h', 'help', '?') :
                    printHelp(valid_input_tags)
                # Original read "in ('' or 's')" which evaluated to the
                # substring test "in 's'" and only worked by accident;
                # spell out the intended tuple membership.
                elif user_input in ('', 's'):
                    print('Skipped\n')
                elif user_input == 'f':
                    finished = True
    print('Done! Yay!')
    return tagged_strings, strings_left_to_tag
def print_table(table):
    """Pretty-print rows of equal-length tuples as an aligned ASCII table."""
    widths = [max(len(cell) for cell in column) for column in zip(*table)]
    for row in table:
        padded = [u"{:{}}".format(cell, widths[idx]) for idx, cell in enumerate(row)]
        print(u"| %s |" % " | ".join(padded))
def manualTagging(preds, valid_input_tags):
    """Interactively correct predicted (token, tag) pairs one token at a time.

    Return keeps the predicted tag, a shortcut from valid_input_tags picks a
    different tag, 'oops' restarts labeling of the whole string, 'help'
    reprints the tag menu. Returns the corrected list of (token, tag) pairs.
    """
    tagged_sequence = []
    for token, predicted_tag in preds:
        while True:
            print('What is \'%s\' ? If %s hit return' % (token, predicted_tag))
            user_choice = sys.stdin.readline().strip()
            if user_choice == '' :
                # blank input accepts the model's prediction
                tag = predicted_tag
                break
            elif user_choice in valid_input_tags :
                tag = valid_input_tags[user_choice]
                break
            elif user_choice in ('h', 'help', '?') :
                printHelp(valid_input_tags)
            elif user_choice == 'oops':
                print('No worries! Let\'s start over in labeling this string')
                # recurse to re-label the whole string from scratch
                tagged_sequence_redo = manualTagging(preds, valid_input_tags)
                return tagged_sequence_redo
            else:
                print("That is not a valid tag. Type 'help' to see the valid inputs")
        tagged_sequence.append((token, tag))
    return tagged_sequence
def naiveConsoleLabel(raw_strings, labels, module):
    """Console-label strings when no trained tagger is available.

    Unlike consoleLabel there are no predictions to confirm: the user tags
    every token by hand via naiveManualTag. Returns
    (tagged_strings, strings_left_to_tag).
    """
    print('\nStart console labeling!\n')
    valid_input_tags = OrderedDict([(str(i), label) for i, label in enumerate(labels)])
    printHelp(valid_input_tags)
    valid_responses = ['t', 's', 'f', '']
    finished = False
    strings_left_to_tag = raw_strings.copy()
    total_strings = len(raw_strings)
    tagged_strings = set([])
    for i, raw_sequence in enumerate(raw_strings, 1):
        if not finished:
            print('\n(%s of %s)' % (i, total_strings))
            print('-'*50)
            print('STRING: %s' %raw_sequence)
            tokens = module.tokenize(raw_sequence)
            user_input = None
            while user_input not in valid_responses :
                sys.stderr.write('(t)ag / (s)kip / (f)inish tagging / (h)elp\n')
                user_input = sys.stdin.readline().strip()
                # blank input defaults to tagging
                if user_input =='t' or user_input == '':
                    tagged_sequence = naiveManualTag(tokens, valid_input_tags)
                    tagged_strings.add(tuple(tagged_sequence))
                    strings_left_to_tag.remove(raw_sequence)
                elif user_input in ('h', 'help', '?') :
                    printHelp(valid_input_tags)
                elif user_input == 's':
                    print('Skipped\n')
                elif user_input == 'f':
                    finished = True
    print('Done! Yay!')
    return tagged_strings, strings_left_to_tag
def naiveManualTag(raw_sequence, valid_input_tags):
    """Prompt the user for a tag for every token in raw_sequence.

    'help' reprints the tag menu; 'oops' restarts the whole sequence.
    Returns a list of (token, label) pairs.
    """
    pairs = []
    for token in raw_sequence:
        while True:
            print('What is \'%s\' ?' %token)
            answer = sys.stdin.readline().strip()
            if answer in valid_input_tags:
                pairs.append((token, valid_input_tags[answer]))
                break
            if answer in ('h', 'help', '?'):
                printHelp(valid_input_tags)
            elif answer == 'oops':
                print('No worries! Let\'s start over in labeling this string')
                # Discard everything and relabel the sequence from the top.
                return naiveManualTag(raw_sequence, valid_input_tags)
            else:
                print("That is not a valid tag. Type 'help' to see the valid inputs")
    return pairs
def printHelp(valid_input_tags):
    """Print the key->label menu plus the 'help'/'oops' usage hints."""
    print('*'*50)
    print('These are the tags available for labeling:')
    for key, tag in valid_input_tags.items():
        print('%s : %s' %(key, tag))
    print("\ntype 'help' at any time to see labels")
    print("type 'oops' if you make a labeling error\n")
    print('*'*50, '\n')
def label(module, infile, outfile, xml):
    """Run an interactive labeling session over the strings in infile.

    Reads one raw string per CSV row from infile, asks the user to label
    them (model-assisted when module.TAGGER exists), appends the results
    to the XML training data at outfile, and writes the strings that were
    not labeled to an 'unlabeled_' sibling of infile.
    """
    training_data = data_prep_utils.TrainingData(xml, module)
    reader = csv.reader(infile)
    # Guard against blank rows, which would raise IndexError on row[0].
    strings = set(row[0] for row in reader if row)
    labels = module.LABELS
    if module.TAGGER:
        labeled_list, raw_strings_left = consoleLabel(strings, labels, module)
    else:
        labeled_list, raw_strings_left = naiveConsoleLabel(strings, labels, module)
    training_data.extend(labeled_list)
    # NOTE(review): the with-block truncates outfile but never uses the
    # handle; training_data.write presumably reopens by path -- confirm.
    with open(outfile, 'wb'):
        training_data.write(outfile)
    file_slug = os.path.basename(infile.name)
    if not file_slug.startswith('unlabeled_'):
        file_slug = 'unlabeled_' + file_slug
    # BUG FIX: plain string concatenation dropped the path separator
    # (e.g. "some/dirunlabeled_foo.csv"); os.path.join inserts it and
    # still behaves correctly when dirname is ''.
    remainder_file = os.path.join(os.path.dirname(infile.name), file_slug)
    data_prep_utils.list2file(raw_strings_left, remainder_file)
|
philiparvidsson/pymake2
|
tests/make_depends_circular.py
|
#!/usr/bin/env python
#---------------------------------------
# IMPORTS
#---------------------------------------
import test
from pymake2 import *
#---------------------------------------
# FUNCTIONS
#---------------------------------------
# Three no-op targets wired into a dependency cycle:
#   my_target_1 -> my_target_3 -> my_target_2 -> my_target_1
@target
@depends_on('my_target_3')
def my_target_1(conf):
    pass
@target
@depends_on('my_target_1')
def my_target_2(conf):
    pass
@target
@depends_on('my_target_2')
def my_target_3(conf):
    pass
#---------------------------------------
# SCRIPT
#---------------------------------------
# Building my_target_3 must fail because pymake2 should detect the
# circular dependency chain above.
test.should_fail()
pymake2({}, [ 'my_target_3' ])
test.success()
|
algorhythms/LeetCode
|
443 String Compression.py
|
#!/usr/bin/python3
"""
Given an array of characters, compress it in-place.
The length after compression must always be smaller than or equal to the original array.
Every element of the array should be a character (not int) of length 1.
After you are done modifying the input array in-place, return the new length of the array.
Follow up:
Could you solve it using only O(1) extra space?
"""
class Solution:
    def compress(self, chars):
        """
        In-place run-length encode chars, returning the compressed length.

        Scans runs of equal characters; each run contributes its character
        followed by its decimal length (length omitted when the run is a
        single character), written back into chars from the front.
        :type chars: List[str]
        :rtype: int
        """
        write = 1       # next write position (chars[0] is already in place)
        run_start = 0   # index where the current run began
        for idx in range(1, len(chars) + 1):
            # Extend the run while the same character repeats.
            if idx < len(chars) and chars[idx] == chars[run_start]:
                continue
            run_len = idx - run_start
            if run_len > 1:
                for digit in str(run_len):
                    chars[write] = digit
                    write += 1
            if idx < len(chars):
                # Start the next run with its leading character.
                chars[write] = chars[idx]
                write += 1
                run_start = idx
        return write
    def compress_error(self, chars):
        """
        Buggy reference variant: reuses one pointer for both the run start
        and the write position, so run detection can read rewritten cells.
        Kept verbatim in behavior for comparison.
        :type chars: List[str]
        :rtype: int
        """
        ptr = 0
        for idx in range(1, len(chars) + 1):
            if idx < len(chars) and chars[idx] == chars[ptr]:
                continue
            run_len = idx - ptr
            if run_len == 1:
                ptr = min(ptr + 1, len(chars) - 1)
            else:
                for digit in str(run_len):
                    ptr += 1
                    chars[ptr] = digit
            if idx < len(chars):
                ptr += 1
                chars[ptr] = chars[idx]
        return ptr + 1
# Smoke tests exercising the correct implementation (not compress_error).
if __name__ == "__main__":
    assert Solution().compress(["a"]) == 1
    assert Solution().compress(["a","a","b","b","c","c","c"]) == 6
    assert Solution().compress(["a","b","b","b","b","b","b","b","b","b","b","b","b"]) == 4
|
jllanfranchi/phys597_computational2
|
landau_ch19_problem19.3.2/p9x3x2_v2.py
|
# -*- coding: utf-8 -*-
# <nbformat>3.0</nbformat>
# <rawcell>
# #!/usr/bin/env python
# <codecell>
from __future__ import division
from __future__ import with_statement
import numpy as np
from pylab import ion
import matplotlib as mpl
from matplotlib.path import Path
from matplotlib import pyplot as plt
from matplotlib import animation
from scipy.optimize import curve_fit
from scipy.weave import inline, converters
import sys
import time
import cPickle as pickle
from JSAnimation import IPython_display, HTMLWriter
from smartFormat import smartFormat
from plotGoodies import plotDefaults
plotDefaults()
# <codecell>
__author__ = "J.L. Lanfranchi"
__email__ = "jll1062@phys.psu.edu"
__copyright__ = "Copyright 2014 J.L. Lanfranchi"
__credits__ = ["J.L. Lanfranchi"]
__license__ = """Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including without
limitation the rights to use, copy, modify, merge, publish, distribute,
sublicense, and/or sell copies of the Software, and to permit persons to whom
the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE."""
# <codecell>
#-- Turn live-interactive plotting on (makes updated plots appear animated)
ion()
#-- Adjust the font used on the plots
font = {'family' : 'serif', 'weight' : 'normal', 'size' : 8}
#mpl.rcParams('font', **font)
# <codecell>
class WaxWendroff:
def __init__(self):
self.c_lax_wendroff = """
py::list ret;
double beta2 = beta*beta;
double u_i2;
double u_ip12;
double u_im12;
double this_diff;
double max_ydiff = 0;
int j;
int i = 0;
//u_i2 = u0(i)*u0(i);
//u_ip12 = u0(i+1)*u0(i+1);
for (j=0; j<n_skip; j++) {
for (i=1; i<m-1; i++) {
//u_im12 = u_i2;
//u_i2 = u_ip2;
u_i2 = u0(i)*u0(i);
u_im12 = u0(i-1)*u0(i-1);
u_ip12 = u0(i+1)*u0(i+1);
//-- Lax - Wendroff scheme
u(i) = u0(i)
- 0.25*beta*(u_ip12 - u_im12)
+ 0.125*beta2 * ( (u0(i+1)+u0(i))*(u_ip12-u_i2)
- (u0(i)+u0(i-1))*(u_i2-u_im12) );
this_diff = fabs(u(i)-u(i-1));
if (this_diff > max_ydiff)
max_ydiff = this_diff;
//-- Update "present step" array element with what was just computed as
// the next step" value for this array element
u0(i) = u(i);
}
}
//for (i=1; i<m-1; i++)
// u0(i) = u(i);
//-- Enforce boundary conditions
//u(0) = 0;
//u(m-1) = 0;
ret.append(max_ydiff);
return_val = ret;
"""
self.m = 1000
self.c = 1.0
#dx = 1./m
self.dx = 2*np.pi/self.m
self.dt = self.dx/10
self.epsilon = 1.0
self.beta = self.epsilon*self.dt/self.dx
self.u = np.zeros((self.m+1),float)
self.u0 = np.zeros((self.m+1), float)
self.uf = np.zeros((self.m+1),float)
self.T_final = 100
self.maxN = int(self.T_final/self.dt)
print "dt =", self.dt, ", dx =", self.dx, \
", epsilon =", self.epsilon, ", beta =", self.beta
self.x = np.arange(-(self.m/2)*self.dx,(self.m/2)*self.dx,self.dx)
print len(self.x)
#-- beta = 0.01
#-- epsilon = 0.2
#-- dx = 1e-3
#-- dt = 1e-4
#-- beta = epsilon*dt/dx = 0.02
self.prob = 1
if self.prob == 0:
def finalFun(x, t):
return -np.exp( - 10.*(x - 1.5 - self.c*t)**2 ) \
+ np.exp( - 10.*(x + 1.5 + self.c*t)**2 ) # Exact
elif self.prob == 1:
def finalFun(x, t):
a0 = -1.0
fx = 1 #4*np.pi
return a0/2*np.sin(fx*x-self.c*t)+a0/2*np.sin(fx*x+self.c*t)
self.u0 = finalFun(self.x, 0)
self.u = np.zeros_like(self.u0)
self.fig1 = plt.figure(1, figsize=(5,10), dpi=120)
self.fig1.clf()
self.ax1 = self.fig1.add_subplot(211)
self.ax1.plot(self.x, self.u0, '-',
color=(.6,.6,.6), lw=6, label="initial cond")
self.l_ns, = self.ax1.plot(self.x, self.u, 'o-',
markersize=2,
color='b',
markerfacecolor=(0.8,0,0,.25),
markeredgecolor=(0.8,0,0,.25),
lw=0.5,
label="numerical soln")
self.ax1.legend(loc="best")
self.ax1.set_xlim(-np.pi,np.pi)
self.ax1.set_ylim(-1,1)
self.ax1.set_xlabel(r"Spatial dimension, $x$")
self.ax1.set_title(r"Spatial wave depiction")
self.ax2 = self.fig1.add_subplot(212)
self.l_ms, = self.ax2.plot(0,0, '-o',
color='k',
markerfacecolor='g',
markersize=3,
lw=1.0)
self.ax2.set_xlabel(r"Time index, $j$")
#ax2.set_ylabel(r"Maximum spatial slope")
self.ax2.set_xlim(0, self.maxN)
self.ax2.set_ylim(0,500)
self.ax2.set_title(r"Maximum spatial slope at a given time step")
plt.tight_layout()
#-- Note: Time steps are indexed with j and spatial coordinates with i.
# The previous solution is preserved in u0 for use in computing the
# new solution, which is incrementally stored into the u array.
#
# Once the computation is complete for the new solution, the u array
# is copied into u0 for use in the next time step.
#def init(self):
self.l_ns.set_data(self.x, finalFun(self.x,0))
self.l_ms.set_data(0,0)
self.maxslopelist = []
slf.nskiplist = []
self.allj = []
self.n_skip = 1
self.j = 0
#return self.l_ns, self.l_ms
def animate(self, ii):
print "Iteration number, ii:", ii
out = inline(self.c_lax_wendroff, ['self.u', 'self.u0', 'self.beta',
'self.m', 'self.n_skip'],
type_converters=converters.blitz)
self.j += self.n_skip
self.allj.append(j)
self.slope = out[0]/self.dx
self.maxslopelist.append(self.slope)
self.n_skip = min( max(int(5e4/self.slope**2), 10), 1000)
self.n_skip = 100
self.nskiplist.append(n_skip)
print out[0]/self.dx
self.l_ns.set_ydata(self.u)
self.l_ms.set_xdata(self.allj)
self.l_ms.set_ydata(self.maxslopelist)
self.ax2.set_ylim(0,np.max(self.maxslopelist))
self.ax2.set_xlim(0,self.j)
self.fig1.canvas.draw()
#plt.draw()
#if j >= maxN or slope > 2000:
# break
#return l_ns, l_ms
#fig2 = plt.figure(2)
#fig2.clf()
#ax = fig2.add_subplot(111)
#ax.plot(nskiplist, 'm-', lw=3)
#ax.set_ylabel("n skip")
#plt.tight_layout()
ww = WaxWendroff()
# BUG FIX: keep a reference to the animation object.  FuncAnimation only
# holds a weak reference internally, so an unassigned animation is
# garbage-collected and never runs.
anim = animation.FuncAnimation(ww.fig1, ww.animate, frames=20, blit=True)
# <codecell>
plt.show()
# <codecell>
|
aclowes/yawn
|
yawn/utilities/cron.py
|
import datetime
from django.core import validators
class Crontab:
    """
    Simplified Crontab
    Support "minute hour weekday" components of a standard cron job.
    - "*/15 2,7,15 1-5" means "every fifteen minutes, on hours 2 7 15, Monday-Friday"
    - Minutes are from 0-59, hours from 0-23, and days from 0(Sunday)-6(Saturday)
    - Fields can contain multiple comma-separated values
    - Values can be an integer or repeating pattern of the '*/2' variety
    """
    def __init__(self, schedule: str):
        """Parse a "minute hour weekday" schedule; raises ValueError if invalid."""
        self.schedule = schedule
        components = schedule.split(' ')
        if len(components) != 3:
            raise ValueError('Crontab must be three space-delimited components')
        minutes, hours, weekdays = components
        self.minutes = parse(minutes, 60)
        self.hours = parse(hours, 24)
        # BUG FIX: weekdays run 0-6 (see class docstring), so the exclusive
        # bound is 7, not 24.  The old bound silently accepted weekdays
        # like "15" that can never match, making next_run() raise
        # RuntimeError at schedule time instead of ValueError at parse time.
        self.weekdays = parse(weekdays, 7)
    def __repr__(self):
        return '<Crontab: {}>'.format(self.schedule)
    def next_run(self, current_time: datetime.datetime) -> datetime.datetime:
        """Given the current time, when is the next scheduled run?"""
        # if next run is next day, get smallest hour, smallest minute
        # if next run is today, future hour, get smallest minute
        # if next run is today, this hour, get next greatest minute
        next_run = datetime.datetime(current_time.year, current_time.month, current_time.day,
                                     tzinfo=current_time.tzinfo)
        weekday = current_time.isoweekday()
        weekday = 0 if weekday == 7 else weekday  # Move Sunday to day 0
        if weekday in self.weekdays:
            # could be a run today
            if current_time.hour in self.hours:
                # could be a run this hour
                for minute in self.minutes:
                    if minute > current_time.minute:
                        # there is a run this hour
                        return next_run.replace(hour=current_time.hour, minute=minute)
            # no run this hour, check future hours
            for hour in self.hours:
                if hour > current_time.hour:
                    # there is a run today
                    return next_run.replace(hour=hour, minute=self.minutes[0])
        # no run today, look for next matching weekday
        for day in range(1, 7):
            next_run += datetime.timedelta(days=1)
            weekday = next_run.isoweekday()
            weekday = 0 if weekday == 7 else weekday  # Move Sunday to day 0
            if weekday in self.weekdays:
                return next_run.replace(hour=self.hours[0], minute=self.minutes[0])
        raise RuntimeError('No next run found for schedule {}'.format(self.schedule))
def parse(pattern: str, max_value: int):
    """Convert a string crontab component into a sorted list of integers
    below max_value.

    Accepts comma-separated terms, each an integer, a 'lo-hi' range, or
    '*', optionally followed by '/step' to keep only multiples of step.
    Raises ValueError on malformed or out-of-range terms, or if nothing
    matches.
    """
    values = set()
    for part in pattern.split(','):
        fraction = part.split('/')
        if len(fraction) > 2:
            raise ValueError('Expression {} should contain zero or one slash (/)'.format(part))
        numerator = part if len(fraction) == 1 else fraction[0]
        denominator = int(fraction[1]) if len(fraction) == 2 else None
        # Resolve the term into a half-open [lower, upper) range.
        if numerator == '*':
            lower, upper = 0, max_value
        elif '-' in numerator:
            lo, hi = numerator.split('-')
            lower, upper = int(lo), int(hi) + 1
        else:
            lower, upper = int(numerator), int(numerator) + 1
        if lower < 0 or upper > max_value:
            raise ValueError('Expression {} is outside the range {}-{}'.format(
                part, 0, max_value))
        values.update(
            x for x in range(lower, upper)
            if denominator is None or x % denominator == 0)
    if not values:
        raise ValueError('Expression {} gives no runs'.format(pattern))
    return sorted(values)
def cron_validator(crontab: str):
    """Django validator: reject crontab strings that Crontab cannot parse.

    Raises django ValidationError carrying the underlying parse message.
    """
    try:
        Crontab(crontab)
    except ValueError as exc:
        # Surface the original parse error to the form/model user.
        raise validators.ValidationError(
            'Invalid crontab expression: {} ({})'.format(crontab, exc))
|
Shuailong/Leetcode
|
solutions/pascals-triangle-ii.py
|
#!/usr/bin/env python
# encoding: utf-8
"""
pascals-triangle-ii.py
Created by Shuailong on 2016-02-20.
https://leetcode.com/problems/pascals-triangle-ii/.
"""
class Solution(object):
    def getRow(self, rowIndex):
        """
        Build Pascal's triangle one row at a time, keeping only the
        previous row, and return row number rowIndex (0-based).
        :type rowIndex: int
        :rtype: List[int]
        """
        previous = []
        current = []
        for size in range(rowIndex + 1):
            # Edges are 1; interior entries sum the two parents above.
            current = [
                1 if j == 0 or j == size else previous[j] + previous[j - 1]
                for j in range(size + 1)
            ]
            previous = current
        return current
def main():
    # Demo: prints [1, 5, 10, 10, 5, 1] (Python 2 print statement).
    solution = Solution()
    print solution.getRow(5)
if __name__ == '__main__':
    main()
|
Burning-Man-Earth/iBurn-Data
|
scripts/2013/playa_data/merge_camp_id_from_events.py
|
import Levenshtein
import json
from string_util import cleanString
'''
This script merges camp ids into the data from
./data/playaevents-camps-2013.json
OR ./results/camp_data_and_locations.json
using playaevents-events-2013
(The Playa Events API Events feed)
'''
# Threshold under which to discard partial string matches
MATCH_THRESHOLD = .7
camp_file = open('./results/camp_data_and_locations.json')
events_file = open('./data/playaevents-events-2013.json')
camp_json = json.loads(camp_file.read())
events_json = json.loads(events_file.read())
# Some entries in event_data are null, remove them before writing final json
null_camp_indexes = []
# camps without a match, for manual inspection
unmatched_camps = []
matched_camps = []
# match name fields between entries in two files
for index, camp in enumerate(camp_json):
max_match = 0
max_match_event = ''
if camp != None and 'name' in camp:
for event in events_json:
if 'hosted_by_camp' in event:
match = Levenshtein.ratio(cleanString(camp['name']), cleanString(event['hosted_by_camp']['name']))
if match > max_match:
max_match = match
max_match_event = event
#print "Best match for " + event['name'] + " : " + max_match_camp['name'] + " (confidence: " + str(max_match) + ")"
if max_match > MATCH_THRESHOLD:
# Match found
camp['id'] = max_match_event['hosted_by_camp']['id']
matched_camps.append(camp)
else:
unmatched_camps.append(camp)
elif not 'name' in camp:
null_camp_indexes.append(index)
# To remove null entries from list, we must move in reverse
# to preserve list order as we remove
null_camp_indexes.reverse()
for index in null_camp_indexes:
camp_json.pop(index)
unmatched_camps_file = open('./results/unmatched_camps_id.json', 'w')
unmatched_camps_file.write(json.dumps(unmatched_camps, sort_keys=True, indent=4))
result_file = open('./results/camp_data_and_locations_ids.json', 'w')
result_file.write(json.dumps(camp_json, sort_keys=True, indent=4))
if len(unmatched_camps) > 0:
print "Matches not found for " + str(len(unmatched_camps)) + " camps"
print "Matched: "+str(len(matched_camps))
|
raphaelrpl/portal
|
backend/test/question_tests/question_edit_tests.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from base import GAETestCase
from datetime import datetime, date
from decimal import Decimal
from question_app.question_model import Question
from routes.questions.edit import index, save
from mommygae import mommy
from tekton.gae.middleware.redirect import RedirectResponse
class IndexTests(GAETestCase):
    """Smoke-test rendering of the question edit page."""
    def test_success(self):
        # A persisted Question should render the edit template cleanly.
        saved_question = mommy.save_one(Question)
        response = index(saved_question.key.id())
        self.assert_can_render(response)
class EditTests(GAETestCase):
    """Exercise the save handler for valid and invalid submissions."""
    def test_success(self):
        # A valid name should redirect and persist the new value.
        question = mommy.save_one(Question)
        properties_before = question.to_dict()
        response = save(question.key.id(), name='name_string')
        self.assertIsInstance(response, RedirectResponse)
        reloaded = question.key.get()
        self.assertEquals('name_string', reloaded.name)
        self.assertNotEqual(properties_before, reloaded.to_dict())
    def test_error(self):
        # A missing required 'name' should re-render with errors and
        # leave the stored entity untouched.
        question = mommy.save_one(Question)
        properties_before = question.to_dict()
        response = save(question.key.id())
        form_errors = response.context['errors']
        self.assertSetEqual(set(['name']), set(form_errors.keys()))
        self.assertEqual(properties_before, question.key.get().to_dict())
        self.assert_can_render(response)
|
velfimov/django-countries
|
django_countries/tests/test_fields.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import django
from django.forms import Select
from django.forms.models import modelform_factory
from django.test import TestCase
from django.utils import translation
from django.utils.encoding import force_text
try:
from unittest import skipIf
except:
from django.utils.unittest import skipIf
from django_countries import fields, countries
from django_countries.tests import forms
from django_countries.tests.models import Person, AllowNull, en_zed
skipUnlessLegacy = skipIf(
django.VERSION >= (1, 5),
"Legacy tests only necessary in Django < 1.5")
class TestCountryField(TestCase):
    """Behaviour of CountryField on model instances: comparisons, flag
    URLs, blank/null handling, queryset lookups and model forms."""
    def test_logic(self):
        person = Person(name='Chris Beaven', country='NZ')
        self.assertEqual(person.country, 'NZ')
        self.assertNotEqual(person.country, 'ZZ')
        self.assertTrue(person.country)
        # An empty country code is falsy.
        person.country = ''
        self.assertFalse(person.country)
    def test_only_from_instance(self):
        # The descriptor should not be readable from the class itself.
        self.assertRaises(AttributeError, lambda: Person.country)
    @skipIf(
        django.VERSION < (1, 7), "Field.deconstruct introduced in Django 1.7")
    def test_deconstruct(self):
        field = Person._meta.get_field('country')
        self.assertEqual(
            field.deconstruct(),
            ('country', 'django_countries.fields.CountryField', [],
                {'max_length': 2}))
    def test_text(self):
        person = Person(name='Chris Beaven', country='NZ')
        self.assertEqual(force_text(person.country), 'NZ')
    def test_name(self):
        person = Person(name='Chris Beaven', country='NZ')
        self.assertEqual(person.country.name, 'New Zealand')
    def test_flag(self):
        person = Person(name='Chris Beaven', country='NZ')
        with self.settings(STATIC_URL='/static-assets/'):
            self.assertEqual(
                person.country.flag, '/static-assets/flags/nz.gif')
    def test_custom_field_flag_url(self):
        person = Person(name='Chris Beaven', country='NZ', other_country='US')
        self.assertEqual(
            person.other_country.flag, '//flags.example.com/us.gif')
    def test_COUNTRIES_FLAG_URL_setting(self):
        # Custom relative url
        person = Person(name='Chris Beaven', country='NZ')
        with self.settings(COUNTRIES_FLAG_URL='img/flag-{code_upper}.png',
                           STATIC_URL='/static-assets/'):
            self.assertEqual(
                person.country.flag, '/static-assets/img/flag-NZ.png')
        # Custom absolute url
        with self.settings(COUNTRIES_FLAG_URL='https://flags.example.com/'
                                              '{code_upper}.PNG'):
            self.assertEqual(
                person.country.flag, 'https://flags.example.com/NZ.PNG')
    def test_blank(self):
        person = Person.objects.create(name='The Outsider')
        self.assertEqual(person.country, '')
        person = Person.objects.get(pk=person.pk)
        self.assertEqual(person.country, '')
    def test_null(self):
        person = AllowNull.objects.create(country=None)
        self.assertIsNone(person.country.code)
        person = AllowNull.objects.get(pk=person.pk)
        self.assertIsNone(person.country.code)
    def test_len(self):
        person = Person(name='Chris Beaven', country='NZ')
        self.assertEqual(len(person.country), 2)
        person = Person(name='The Outsider', country=None)
        self.assertEqual(len(person.country), 0)
    def test_lookup_text(self):
        # Filtering by a raw country-code string.
        Person.objects.create(name='Chris Beaven', country='NZ')
        Person.objects.create(name='Pavlova', country='NZ')
        Person.objects.create(name='Killer everything', country='AU')
        lookup = Person.objects.filter(country='NZ')
        names = lookup.order_by('name').values_list('name', flat=True)
        self.assertEqual(list(names), ['Chris Beaven', 'Pavlova'])
    def test_lookup_country(self):
        # Filtering by a Country object.
        Person.objects.create(name='Chris Beaven', country='NZ')
        Person.objects.create(name='Pavlova', country='NZ')
        Person.objects.create(name='Killer everything', country='AU')
        oz = fields.Country(code='AU', flag_url='')
        lookup = Person.objects.filter(country=oz)
        names = lookup.values_list('name', flat=True)
        self.assertEqual(list(names), ['Killer everything'])
    def test_save_empty_country(self):
        Person.objects.create(name='The Outsider')
        person = Person.objects.get()
        self.assertEqual(person.country, '')
    def test_create_modelform(self):
        Form = modelform_factory(Person, fields=['country'])
        form_field = Form().fields['country']
        self.assertTrue(isinstance(form_field.widget, Select))
    def test_render_form(self):
        Form = modelform_factory(Person, fields=['country'])
        Form().as_p()
class TestCountryObject(TestCase):
    """Behaviour of the Country value object itself: hashing, repr, flag
    URLs, and IOC / alpha3 / numeric code conversions."""
    def test_hash(self):
        # A Country hashes like its code string.
        country = fields.Country(code='XX', flag_url='')
        self.assertEqual(hash(country), hash('XX'))
    def test_repr(self):
        # flag_url only appears in the repr when it was explicitly given.
        country1 = fields.Country(code='XX')
        country2 = fields.Country(code='XX', flag_url='')
        self.assertEqual(
            repr(country1),
            'Country(code={0})'.format(repr('XX')))
        self.assertEqual(
            repr(country2),
            'Country(code={0}, flag_url={1})'.format(repr('XX'), repr('')))
    def test_flag_on_empty_code(self):
        country = fields.Country(code='', flag_url='')
        self.assertEqual(country.flag, '')
    def test_ioc_code(self):
        country = fields.Country(code='NL', flag_url='')
        self.assertEqual(country.ioc_code, 'NED')
    def test_country_from_ioc_code(self):
        country = fields.Country.country_from_ioc('NED')
        self.assertEqual(country, fields.Country('NL', flag_url=''))
    def test_country_from_blank_ioc_code(self):
        country = fields.Country.country_from_ioc('')
        self.assertIsNone(country)
    def test_country_from_nonexistence_ioc_code(self):
        country = fields.Country.country_from_ioc('XXX')
        self.assertIsNone(country)
    def test_alpha3(self):
        country = fields.Country(code='BN')
        self.assertEqual(country.alpha3, 'BRN')
    def test_alpha3_invalid(self):
        # Unknown codes yield an empty alpha3, not an error.
        country = fields.Country(code='XX')
        self.assertEqual(country.alpha3, '')
    def test_numeric(self):
        country = fields.Country(code='BN')
        self.assertEqual(country.numeric, 96)
    def test_numeric_padded(self):
        # numeric_padded is the zero-filled three-digit string form.
        country = fields.Country(code='AL')
        self.assertEqual(country.numeric_padded, '008')
        country = fields.Country(code='BN')
        self.assertEqual(country.numeric_padded, '096')
        country = fields.Country(code='NZ')
        self.assertEqual(country.numeric_padded, '554')
    def test_numeric_invalid(self):
        country = fields.Country(code='XX')
        self.assertEqual(country.numeric, None)
    def test_numeric_padded_invalid(self):
        country = fields.Country(code='XX')
        self.assertEqual(country.numeric_padded, None)
class TestModelForm(TestCase):
    """Form integration: lazy translation of choices and legacy (<1.5)
    default/empty-value handling."""
    def test_translated_choices(self):
        lang = translation.get_language()
        translation.activate('eo')
        form = forms.PersonForm()
        try:
            # This is just to prove that the language changed.
            self.assertEqual(list(countries)[0][1], 'Afganio')
            # If the choices aren't lazy, this wouldn't be translated. It's the
            # second choice because the first one is the initial blank option.
            self.assertEqual(
                form.fields['country'].choices[1][1], 'Afganio')
            self.assertEqual(
                form.fields['country'].widget.choices[1][1], 'Afganio')
        finally:
            translation.activate(lang)
    @skipUnlessLegacy
    def test_legacy_default(self):
        self.assertEqual(
            forms.LegacyForm.base_fields['default'].initial, 'AU')
    @skipUnlessLegacy
    def test_legacy_default_callable(self):
        # en_zed is a callable default; the bound field resolves it to 'NZ'.
        self.assertEqual(
            forms.LegacyForm.base_fields['default_callable'].initial, en_zed)
        form = forms.LegacyForm()
        self.assertEqual(form['default_callable'].value(), 'NZ')
    @skipUnlessLegacy
    def test_legacy_empty_value(self):
        self.assertEqual(
            forms.LegacyForm.base_fields['default'].empty_value, None)
        self.assertEqual(
            forms.LegacyForm.base_fields['default_callable'].empty_value, '')
|
opencivicdata/scrapers-ca
|
ca_mb_winnipeg/people.py
|
from utils import CanadianScraper, CanadianPerson as Person
import json
import re
import requests
COUNCIL_PAGE = 'http://winnipeg.ca/council/'
class WinnipegPersonScraper(CanadianScraper):
    """Scrape Winnipeg city councillors, joining the council web page with
    the city's open-data API to resolve ward, email, phone and fax."""
    def scrape(self):
        # https://winnipeg.ca/council/wards/includes/wards.js
        # var COUNCIL_API = 'https://data.winnipeg.ca/resource/r4tk-7dip.json';
        api_url = 'https://data.winnipeg.ca/resource/r4tk-7dip.json'
        data = json.loads(requests.get(api_url).content)
        page = self.lxmlize(COUNCIL_PAGE, 'utf-8')
        councillors = page.xpath('//div[@class="box"]')
        assert len(councillors), 'No councillors found'
        for councillor in councillors:
            role = councillor.xpath('.//div[@class="insideboxtitle"]/text()')[0].strip()
            name = councillor.xpath('.//p[@class="insideboxtext"]/text()')[0]
            image = councillor.xpath('.//@src')[0]
            # The mayor's box carries the role in the title; councillor
            # boxes embed "Councillor" in the name text instead.
            if 'Councillor' in name:
                role = 'Councillor'
                name = name.replace('Councillor ', '')
            url = api_url
            # Look up the scraped name in the current-council API records;
            # fail loudly if the join misses so mismatches get noticed.
            item = next((item for item in data if item['person'] == name and item['current_council']), None)
            if item is None:
                raise Exception(name)
            district = item['name_english'].replace(' - ', '—') # hyphen, m-dash
            email = item['email_link']
            voice = item['phone']
            fax = item['fax']
            p = Person(primary_org='legislature', name=name, district=district, role=role)
            p.add_source(COUNCIL_PAGE)
            p.add_source(url)
            # Skip the site's placeholder portrait.
            if not image.endswith('nophoto.jpg'):
                p.image = image
            p.add_contact('email', parse_email(email))
            p.add_contact('voice', voice, 'legislature')
            p.add_contact('fax', fax, 'legislature')
            yield p
def parse_email(email):
    """Pull the mailbox out of a '...=<user>&...' contact link and append
    the city's mail domain."""
    mailbox = re.search('=([^&]+)', email).group(1)
    return mailbox + '@winnipeg.ca'
|
rug-compling/hmm-reps
|
eval/ner/sequences/extended_feature.py
|
import sys
import numpy as np
#######################
#### Feature Class
### Extracts features from a labeled corpus
#######################
from eval.ner.readers.brown import prepare_cluster_map
from eval.ner.sequences.id_feature import IDFeatures
class ExtendedFeatures(IDFeatures):
    def __init__(self, dataset, brown_cluster_file=None):
        """Feature extractor with optional Brown-cluster lookups.

        dataset: labeled corpus wrapper, passed through to IDFeatures.
        brown_cluster_file: path to Brown clustering output; when given, a
            word -> cluster-id map is built for the cluster-based features.
        """
        super().__init__(dataset)
        self.brown_cluster_file = brown_cluster_file
        self.w_to_clusterid = None
        if brown_cluster_file:
            self.w_to_clusterid = prepare_cluster_map(self.brown_cluster_file)
            assert self.w_to_clusterid is not None
        # use emission features:
        # Feature toggles -- all off by default; enable individually or via
        # set_baseline_features() before extraction.
        self.alphanumeric = False
        self.alldigits = False
        self.brown_id = False
        self.brown_id_plus1 = False
        self.brown_id_plus2 = False
        self.brown_id_minus1 = False
        self.brown_id_minus2 = False
        self.brown_prefix = False # prefix length features; same for all brown_id
        self.brown_prefix_lengths = []
        self.capitalized = False
        self.cappattern = False
        self.hyphen = False
        self.id = False
        self.id_plus1 = False
        self.id_plus2 = False
        self.id_minus1 = False
        self.id_minus2 = False
        self.prefix = False
        self.rep_id = False
        self.rep_id_plus1 = False
        self.rep_id_plus2 = False
        self.rep_id_minus1 = False
        self.rep_id_minus2 = False
        self.suffix = False
        self.uppercased = False
def set_baseline_features(self):
"""
Use listed emission features as baseline
"""
self.capitalized = True
self.cappattern = True
self.hyphen = True
self.id = True
self.id_plus1 = True
self.id_plus2 = True
self.id_minus1 = True
self.id_minus2 = True
self.prefix = True
self.suffix = True
self.uppercased = True
def get_emission_features(self, sequence, pos, y):
"""
Handles previous emissions by expanding the feature_cache dictionaries
"""
# w
x = sequence.x[pos]
# w-1
if pos > 0:
x_min1 = sequence.x[pos-1]
else:
x_min1 = -1
# w-2
if pos > 1:
x_min2 = sequence.x[pos-2]
else:
x_min2 = -2
# w+1
if pos < len(sequence.x)-1:
x_plus1 = sequence.x[pos+1]
else:
x_plus1 = -3
# w+2
if pos < len(sequence.x)-2:
x_plus2 = sequence.x[pos+2]
else:
x_plus2 = -4
if x not in self.node_feature_cache:
self.node_feature_cache[x] = {}
if x_min1 not in self.node_feature_cache[x]:
self.node_feature_cache[x][x_min1] = {}
if x_min2 not in self.node_feature_cache[x][x_min1]:
self.node_feature_cache[x][x_min1][x_min2] = {}
if x_plus1 not in self.node_feature_cache[x][x_min1][x_min2]:
self.node_feature_cache[x][x_min1][x_min2][x_plus1] = {}
if x_plus2 not in self.node_feature_cache[x][x_min1][x_min2][x_plus1]:
self.node_feature_cache[x][x_min1][x_min2][x_plus1][x_plus2] = {}
if y not in self.node_feature_cache[x][x_min1][x_min2][x_plus1][x_plus2]:
node_idx = {}
node_idx = self.add_emission_features(sequence, pos, y, node_idx)
self.node_feature_cache[x][x_min1][x_min2][x_plus1][x_plus2][y] = node_idx
idx = self.node_feature_cache[x][x_min1][x_min2][x_plus1][x_plus2][y]
return idx
    def add_emission_features(self, sequence, pos, y, features):
        """
        Populate `features` (a dict mapping feature id -> value) with every
        enabled emission feature for the word window around `pos` and tag id
        `y`, and return it.

        Word ids for out-of-sequence window positions are marked with the
        sentinels -1/-2 (left edge) and -3/-4 (right edge); those sentinels
        are later rendered as "out-of-seq-left"/"out-of-seq-right".
        """
        # w
        x = sequence.x[pos]
        # w-1
        if pos > 0:
            x_min1 = sequence.x[pos-1]
        else:
            x_min1 = -1
        # w-2
        if pos > 1:
            x_min2 = sequence.x[pos-2]
        else:
            x_min2 = -2
        # w+1
        if pos < len(sequence.x)-1:
            x_plus1 = sequence.x[pos+1]
        else:
            x_plus1 = -3
        # w+2
        if pos < len(sequence.x)-2:
            x_plus2 = sequence.x[pos+2]
        else:
            x_plus2 = -4
        # Get tag name from ID.
        y_name = self.dataset.y_dict.get_label_name(y)
        # Get word name from ID.
        x_name = self.dataset.x_dict.get_label_name(x)
        word = str(x_name)
        # w-1
        if x_min1 == -1: # if no previous word
            word_min1 = "out-of-seq-left"
        else:
            x_min1_name = self.dataset.x_dict.get_label_name(x_min1)
            word_min1 = str(x_min1_name)
        # w-2
        if x_min2 == -2: # if no pre-previous word
            word_min2 = "out-of-seq-left"
        else:
            x_min2_name = self.dataset.x_dict.get_label_name(x_min2)
            word_min2 = str(x_min2_name)
        # w+1
        if x_plus1 == -3: # if no next word
            word_plus1 = "out-of-seq-right"
        else:
            x_plus1_name = self.dataset.x_dict.get_label_name(x_plus1)
            word_plus1 = str(x_plus1_name)
        # w+2
        if x_plus2 == -4: # if no post-next word
            word_plus2 = "out-of-seq-right"
        else:
            x_plus2_name = self.dataset.x_dict.get_label_name(x_plus2)
            word_plus2 = str(x_plus2_name)
        # Word-identity features for the current word and its +/-2 window.
        if self.id:
            # Generate feature name.
            feat_name = "id:{}::{}".format(word, y_name)
            self.features_used.add("id")
            # Get feature ID from name.
            feat_id = self.add_feature(feat_name)
            # Append feature.
            if feat_id != -1:
                features[feat_id] = 1
        if self.id_minus1:
            feat_name = "id-1:{}::{}".format(word_min1, y_name)
            self.features_used.add("id-1")
            # Get feature ID from name.
            feat_id = self.add_feature(feat_name)
            # Append feature.
            if feat_id != -1:
                features[feat_id] = 1
        if self.id_minus2:
            feat_name = "id-2:{}::{}".format(word_min2, y_name)
            self.features_used.add("id-2")
            # Get feature ID from name.
            feat_id = self.add_feature(feat_name)
            # Append feature.
            if feat_id != -1:
                features[feat_id] = 1
        if self.id_plus1:
            feat_name = "id+1:{}::{}".format(word_plus1, y_name)
            self.features_used.add("id+1")
            # Get feature ID from name.
            feat_id = self.add_feature(feat_name)
            # Append feature.
            if feat_id != -1:
                features[feat_id] = 1
        if self.id_plus2:
            feat_name = "id+2:{}::{}".format(word_plus2, y_name)
            self.features_used.add("id+2")
            # Get feature ID from name.
            feat_id = self.add_feature(feat_name)
            # Append feature.
            if feat_id != -1:
                features[feat_id] = 1
        # Orthographic features of the current word.
        if self.capitalized:
            # Iscapitalized
            if word.istitle():
                # Generate feature name.
                feat_name = "capitalized::{}".format(y_name)
                self.features_used.add("capitalized")
                # Get feature ID from name.
                feat_id = self.add_feature(feat_name)
                # Append feature.
                if feat_id != -1:
                    features[feat_id] = 1
        if self.uppercased:
            # Allcapitalized
            if word.isupper():
                # Generate feature name.
                feat_name = "uppercased::{}".format(y_name)
                self.features_used.add("uppercased")
                # Get feature ID from name.
                feat_id = self.add_feature(feat_name)
                # Append feature.
                if feat_id != -1:
                    features[feat_id] = 1
        if self.cappattern:
            # Capitalization pattern in window: one of C (capitalized),
            # N (not capitalized) or - (out of sequence) per position.
            pattern = []
            # w-2
            if word_min2 == "out-of-seq-left":
                pattern.append("-")
            else:
                if word_min2.istitle():
                    pattern.append("C")
                else:
                    pattern.append("N")
            # w-1
            if word_min1 == "out-of-seq-left":
                pattern.append("-")
            else:
                if word_min1.istitle():
                    pattern.append("C")
                else:
                    pattern.append("N")
            # w
            if word.istitle():
                pattern.append("C")
            else:
                pattern.append("N")
            # w+1
            if word_plus1 == "out-of-seq-right":
                pattern.append("-")
            else:
                if word_plus1.istitle():
                    pattern.append("C")
                else:
                    pattern.append("N")
            # w+2
            if word_plus2 == "out-of-seq-right":
                pattern.append("-")
            else:
                if word_plus2.istitle():
                    pattern.append("C")
                else:
                    pattern.append("N")
            # Generate feature name.
            feat_name = "cappattern:{}::{}".format("".join(pattern), y_name)
            self.features_used.add("cappattern")
            # Get feature ID from name.
            feat_id = self.add_feature(feat_name)
            # Append feature.
            if feat_id != -1:
                features[feat_id] = 1
        if self.alldigits:
            # Alldigits
            if word.isdigit():
                # Generate feature name.
                feat_name = "number::{}".format(y_name)
                self.features_used.add("number")
                # Get feature ID from name.
                feat_id = self.add_feature(feat_name)
                # Append feature.
                if feat_id != -1:
                    features[feat_id] = 1
        if self.alphanumeric:
            # Alphanumeric
            if word.isalnum():
                # Generate feature name.
                feat_name = "alphanumber::{}".format(y_name)
                self.features_used.add("alphanumber")
                # Get feature ID from name.
                feat_id = self.add_feature(feat_name)
                # Append feature.
                if feat_id != -1:
                    features[feat_id] = 1
        if self.hyphen:
            # Hyphenized
            if "-" in word:
                # Generate feature name.
                feat_name = "hyphen::{}".format(y_name)
                self.features_used.add("hyphen")
                # Get feature ID from name.
                feat_id = self.add_feature(feat_name)
                # Append feature.
                if feat_id != -1:
                    features[feat_id] = 1
        if self.suffix:
            # Suffixes of length 1..3, only when the word is strictly longer
            # than the suffix.
            max_suffix = 3
            for i in range(max_suffix):
                if len(word) > i+1:
                    suffix = word[-(i+1):]
                    # Generate feature name.
                    feat_name = "suffix:{}::{}".format(suffix, y_name)
                    self.features_used.add("suffix")
                    # Get feature ID from name.
                    feat_id = self.add_feature(feat_name)
                    # Append feature.
                    if feat_id != -1:
                        features[feat_id] = 1
        if self.prefix:
            # Prefixes of length 1..3, same length restriction as suffixes.
            max_prefix = 3
            for i in range(max_prefix):
                if len(word) > i+1:
                    prefix = word[:i+1]
                    # Generate feature name.
                    feat_name = "prefix:{}::{}".format(prefix, y_name)
                    self.features_used.add("prefix")
                    # Get feature ID from name.
                    feat_id = self.add_feature(feat_name)
                    # Append feature.
                    if feat_id != -1:
                        features[feat_id] = 1
        # hmm wordrep features
        # Collect the window offsets whose representation features are
        # enabled and fall inside the sequence.
        positions = []
        if self.rep_id:
            positions.append(0)
        if self.rep_id_minus1:
            if word_min1 != "out-of-seq-left": # if no previous word
                positions.append(-1)
        if self.rep_id_minus2:
            if word_min2 != "out-of-seq-left": # if no previous previous word
                positions.append(-2)
        if self.rep_id_plus1:
            if word_plus1 != "out-of-seq-right": # if no next word
                positions.append(1)
        if self.rep_id_plus2:
            if word_plus2 != "out-of-seq-right": # if no next next word
                positions.append(2)
        for position in positions:
            if sequence.w is not None:
                rep_id = sequence.w[pos+position]
                self.add_rep_feat(rep_id, position, y_name, features, is_tree=False)
            #changed sequence.t to sequence.u as seq.t is deleted
            if sequence.u is not None:
                # NOTE(review): only KeyError is caught here, which implies
                # sequence.u is a mapping — a list would raise IndexError
                # instead. Confirm against the Sequence definition.
                try:
                    rep_id = sequence.u[pos+1+position] #offset of 1 because 0 is root
                except KeyError:
                    continue
                self.add_rep_feat(rep_id, position, y_name, features, is_tree=True)
        # Brown cluster features: look the surface form up in w_to_clusterid,
        # falling back to the lowercased form.
        if self.brown_id:
            # w: Brown cluster id
            clusterid = None
            if word in self.w_to_clusterid:
                clusterid = self.w_to_clusterid[word]
            elif word.lower() in self.w_to_clusterid:
                clusterid = self.w_to_clusterid[word.lower()]
            # NOTE(review): falsy cluster ids (e.g. "" or 0) are skipped by
            # this truthiness check — confirm cluster ids are non-empty
            # strings (bit paths) so nothing is silently dropped.
            if clusterid:
                feat_core_name = "brown_id"
                feat_name = "{}::{}::{}".format(feat_core_name, clusterid, y_name)
                self.features_used.add(feat_core_name)
                feat_id = self.add_feature(feat_name)
                if feat_id != -1:
                    features[feat_id] = 1
                if self.brown_prefix:
                    features = self.add_brown_pref_feat(feat_core_name, clusterid, y_name, features, self.brown_prefix_lengths)
        # w window: Brown Cluster ids
        if self.brown_id_minus1:
            # wcluster-1
            if word_min1 != "out-of-seq-left": # if no previous word
                clusterid = None
                if word_min1 in self.w_to_clusterid:
                    clusterid = self.w_to_clusterid[word_min1]
                elif word_min1.lower() in self.w_to_clusterid:
                    clusterid = self.w_to_clusterid[word_min1.lower()]
                if clusterid:
                    feat_core_name = "brown_id-1"
                    feat_name = "{}::{}::{}".format(feat_core_name, clusterid, y_name)
                    self.features_used.add(feat_core_name)
                    feat_id = self.add_feature(feat_name)
                    if feat_id != -1:
                        features[feat_id] = 1
                    if self.brown_prefix:
                        features = self.add_brown_pref_feat(feat_core_name, clusterid, y_name, features, self.brown_prefix_lengths)
        if self.brown_id_minus2:
            # wcluster-2
            if word_min2 != "out-of-seq-left": # if no pre-previous word
                clusterid = None
                if word_min2 in self.w_to_clusterid:
                    clusterid = self.w_to_clusterid[word_min2]
                elif word_min2.lower() in self.w_to_clusterid:
                    clusterid = self.w_to_clusterid[word_min2.lower()]
                if clusterid:
                    feat_core_name = "brown_id-2"
                    feat_name = "{}::{}::{}".format(feat_core_name, clusterid, y_name)
                    self.features_used.add(feat_core_name)
                    feat_id = self.add_feature(feat_name)
                    if feat_id != -1:
                        features[feat_id] = 1
                    if self.brown_prefix:
                        features = self.add_brown_pref_feat(feat_core_name, clusterid, y_name, features, self.brown_prefix_lengths)
        if self.brown_id_plus1:
            # wcluster+1
            if word_plus1 != "out-of-seq-right": # if no next word
                clusterid = None
                if word_plus1 in self.w_to_clusterid:
                    clusterid = self.w_to_clusterid[word_plus1]
                elif word_plus1.lower() in self.w_to_clusterid:
                    clusterid = self.w_to_clusterid[word_plus1.lower()]
                if clusterid:
                    feat_core_name = "brown_id+1"
                    feat_name = "{}::{}::{}".format(feat_core_name, clusterid, y_name)
                    self.features_used.add(feat_core_name)
                    feat_id = self.add_feature(feat_name)
                    if feat_id != -1:
                        features[feat_id] = 1
                    if self.brown_prefix:
                        features = self.add_brown_pref_feat(feat_core_name, clusterid, y_name, features, self.brown_prefix_lengths)
        if self.brown_id_plus2:
            # wcluster+2
            if word_plus2 != "out-of-seq-right": # if no next word
                clusterid = None
                if word_plus2 in self.w_to_clusterid:
                    clusterid = self.w_to_clusterid[word_plus2]
                elif word_plus2.lower() in self.w_to_clusterid:
                    clusterid = self.w_to_clusterid[word_plus2.lower()]
                if clusterid:
                    feat_core_name = "brown_id+2"
                    feat_name = "{}::{}::{}".format(feat_core_name, clusterid, y_name)
                    self.features_used.add(feat_core_name)
                    feat_id = self.add_feature(feat_name)
                    if feat_id != -1:
                        features[feat_id] = 1
                    if self.brown_prefix:
                        features = self.add_brown_pref_feat(feat_core_name, clusterid, y_name, features, self.brown_prefix_lengths)
        return features
def add_brown_pref_feat(self, feat_core_name, clusterid, y_name, features, pref_lengths):
"""
:param pref_lengths: list containing ints representing prefix lengths
"""
for pref in pref_lengths:
feat_name = "{}_p{}::{}::{}".format(feat_core_name, pref, clusterid[:pref], y_name)
self.features_used.add("{}_p{}".format(feat_core_name, pref))
feat_id = self.add_feature(feat_name)
if feat_id != -1:
features[feat_id] = 1
return features
# def get_transition_features(self, sequence, pos, y, y_prev, y_prev_prev):
# assert (pos >= 0 and pos < len(sequence.x))
#
# if y not in self.edge_feature_cache:
# self.edge_feature_cache[y] = {}
# if y_prev not in self.edge_feature_cache[y]:
# self.edge_feature_cache[y][y_prev] = {}
# if y_prev_prev not in self.edge_feature_cache[y][y_prev]:
# edge_idx = []
# edge_idx = self.add_transition_features(sequence, pos, y, y_prev, y_prev_prev, edge_idx)
# self.edge_feature_cache[y][y_prev][y_prev_prev] = edge_idx
# idx = self.edge_feature_cache[y][y_prev][y_prev_prev]
#
# return idx[:]
# def add_transition_features(self, sequence, pos, y, y_prev, y_prev_prev, features):
# assert pos < len(sequence.x)-1
# # Get label name from ID.
# y_name = self.dataset.y_dict.get_label_name(y)
# # Get previous label names from ID.
# y_prev_name = self.dataset.y_dict.get_label_name(y_prev)
# y_prev_prev_name = self.dataset.y_dict.get_label_name(y_prev_prev)
# # Generate feature name.
# feat_name = "prev_tag:{}::{}".format(y_prev_name, y_name)
# self.features_used.add("prev_tag")
# # Get feature ID from name.
# feat_id = self.add_feature(feat_name)
# # Append feature.
# if feat_id != -1:
# features.append(feat_id)
#
# feat_name = "prev_prev_tag:{}::{}".format(y_prev_prev_name, y_name)
# self.features_used.add("prev_prev_tag")
# # Get feature ID from name.
# feat_id = self.add_feature(feat_name)
# # Append feature.
# if feat_id != -1:
# features.append(feat_id)
#
# return features
def add_rep_feat(self, rep_id, position, y_name, features, is_tree=False):
if position == 0:
position = ""
elif position == 1 or position == 2:
position = "+{}".format(position)
tree = "tree" if is_tree else ""
# discrete
if isinstance(rep_id, (int, np.int64, np.int32)):
feat_name = "{}rep_id{}::{}::{}".format(tree, position, rep_id, y_name)
self.features_used.add("{}rep_id{}".format(tree, position))
feat_id = self.add_feature(feat_name)
if feat_id != -1:
features[feat_id] = 1
# continuous
elif isinstance(rep_id, np.ndarray):
#max_i = np.max(rep_id)
for c, i in enumerate(rep_id):
#if i == max_i or i == max_i-1:
#feat_name = "cont_{}rep_id{}::{}::{}::{}".format(tree, position, c, i, y_name)
feat_name = "cont_{}rep_id{}::{}::{}".format(tree, position, c, y_name)
self.features_used.add("cont_{}rep_id{}".format(tree, position))
feat_id = self.add_feature(feat_name)
if feat_id != -1:
features[feat_id] = i
else:
raise TypeError
#sys.exit("unexpected type: {}".format(type(rep_id)))
|
WenmuZhou/cifar-10-cnn
|
4_Residual_Network/ResNet_keras.py
|
import keras
import numpy as np
from keras.datasets import cifar10
from keras.preprocessing.image import ImageDataGenerator
from keras.layers.normalization import BatchNormalization
from keras.layers import Conv2D, Dense, Input, add, Activation, GlobalAveragePooling2D
from keras.initializers import he_normal
from keras.callbacks import LearningRateScheduler, TensorBoard
from keras.models import Model
from keras import optimizers
from keras import regularizers
num_classes = 10
img_rows, img_cols = 32, 32
img_channels = 3
stack_num = 18
batch_size = 128
epochs = 200
iterations = 391
weight_decay = 0.0005
log_filepath = r'./resnet50/'
def scheduler(epoch):
    """Piecewise-constant learning-rate schedule keyed on the epoch number."""
    schedule = ((60, 0.1), (120, 0.02), (160, 0.004))
    for last_epoch, rate in schedule:
        if epoch <= last_epoch:
            return rate
    return 0.0008
def color_preprocessing(x_train, x_test):
    """Standardize each RGB channel of both image sets to zero mean, unit std.

    Statistics are computed per channel over the whole set, and each set is
    normalized with its own statistics (matching the original behavior —
    note the test set is NOT normalized with training statistics).

    Args:
        x_train, x_test: uint8/float image arrays of shape (N, H, W, 3).
    Returns:
        The two arrays converted to float32 and standardized in place.
    """
    x_train = x_train.astype('float32')
    x_test = x_test.astype('float32')
    # One loop replaces the previous six copy-pasted per-channel statements.
    for images in (x_train, x_test):
        for ch in range(3):
            channel = images[:, :, :, ch]
            images[:, :, :, ch] = (channel - np.mean(channel)) / np.std(channel)
    return x_train, x_test
def residual_network(img_input, classes_num=10, stack_n=18):
    """
    Assemble a pre-activation ResNet graph on top of `img_input`.

    Three stages of residual blocks (16 -> 32 -> 64 filters); the first
    block of stages two and three downsamples with stride 2 and a 1x1
    projection shortcut. Returns the softmax output tensor.
    """
    def conv3x3(tensor, filters, strides):
        # 3x3 convolution with the module-wide initializer/regularizer.
        return Conv2D(filters,
                      kernel_size=(3, 3),
                      strides=strides,
                      padding='same',
                      kernel_initializer=he_normal(),
                      kernel_regularizer=regularizers.l2(weight_decay))(tensor)

    def residual_block(tensor, shape, increase_filter=False):
        filters = shape[1]
        first_stride = (2, 2) if increase_filter else (1, 1)

        # pre-activation ordering: BN -> ReLU before each convolution
        out = Activation('relu')(BatchNormalization()(tensor))
        out = conv3x3(out, filters, first_stride)
        out = Activation('relu')(BatchNormalization()(out))
        out = conv3x3(out, filters, (1, 1))

        if increase_filter:
            # strided 1x1 projection so the shortcut matches the new shape
            shortcut = Conv2D(filters,
                              kernel_size=(1, 1),
                              strides=(2, 2),
                              padding='same',
                              kernel_initializer=he_normal(),
                              kernel_regularizer=regularizers.l2(weight_decay))(tensor)
            return add([out, shortcut])
        return add([out, tensor])

    # stem
    net = conv3x3(img_input, 16, (1, 1))

    # stage 1: stack_n blocks at 16 filters
    for _ in range(0, stack_n):
        net = residual_block(net, [16, 16])

    # stage 2: downsampling block + (stack_n - 1) blocks at 32 filters
    net = residual_block(net, [16, 32], increase_filter=True)
    for _ in range(1, stack_n):
        net = residual_block(net, [16, 32])

    # stage 3: downsampling block + (stack_n - 1) blocks at 64 filters
    net = residual_block(net, [32, 64], increase_filter=True)
    for _ in range(1, stack_n):
        net = residual_block(net, [32, 64])

    net = BatchNormalization()(net)
    net = Activation('relu')(net)
    net = GlobalAveragePooling2D()(net)
    return Dense(classes_num,
                 activation='softmax',
                 kernel_initializer=he_normal(),
                 kernel_regularizer=regularizers.l2(weight_decay))(net)
# Training script: load CIFAR-10, build the ResNet, and fit with SGD +
# step-decay learning rate and light data augmentation.
if __name__ == '__main__':
    # load data
    (x_train, y_train), (x_test, y_test) = cifar10.load_data()
    y_train = keras.utils.to_categorical(y_train, num_classes)
    y_test = keras.utils.to_categorical(y_test, num_classes)
    # color preprocessing
    x_train, x_test = color_preprocessing(x_train, x_test)
    # build network
    img_input = Input(shape=(img_rows,img_cols,img_channels))
    output = residual_network(img_input,num_classes,18)
    resnet = Model(img_input, output)
    print(resnet.summary())
    # set optimizer: SGD with Nesterov momentum; the LearningRateScheduler
    # below overrides this initial lr each epoch.
    sgd = optimizers.SGD(lr=.1, momentum=0.9, nesterov=True)
    resnet.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])
    # set callback
    tb_cb = TensorBoard(log_dir=log_filepath, histogram_freq=0)
    change_lr = LearningRateScheduler(scheduler)
    cbks = [change_lr,tb_cb]
    # set data augmentation: horizontal flips plus small shifts padded with 0
    print('Using real-time data augmentation.')
    datagen = ImageDataGenerator(horizontal_flip=True,
        width_shift_range=0.125,height_shift_range=0.125,fill_mode='constant',cval=0.)
    datagen.fit(x_train)
    # start training; `iterations` (391) is ceil(50000 / batch_size) steps per epoch
    resnet.fit_generator(datagen.flow(x_train, y_train,batch_size=batch_size),
                         steps_per_epoch=iterations,
                         epochs=epochs,
                         callbacks=cbks,
                         validation_data=(x_test, y_test))
    resnet.save('resnet.h5')
|
gnott/elife-bot
|
activity/activity_InvalidateCdn.py
|
import activity
from provider import cloudfront_provider
"""
activity_InvalidateCdn.py activity
"""
class activity_InvalidateCdn(activity.activity):
    """Activity that sends a CloudFront cache-invalidation request for the
    CDN bucket of a given article."""

    def __init__(self, settings, logger, conn=None, token=None, activity_task=None):
        activity.activity.__init__(self, settings, logger, conn, token, activity_task)

        self.name = "InvalidateCdn"
        self.pretty_name = "CloudFront Invalidate Cdn"
        self.version = "1"
        # default task timeout settings (seconds)
        self.default_task_heartbeat_timeout = 30
        self.default_task_schedule_to_close_timeout = 60 * 5
        self.default_task_schedule_to_start_timeout = 30
        self.default_task_start_to_close_timeout = 60 * 5
        self.description = "Runs CloudFront Invalidation request on Cdn bucket."
        self.logger = logger

    def do_activity(self, data):
        """Run the invalidation for the article described by `data`.

        `data` must contain 'article_id', 'version' and 'run'. Returns
        ACTIVITY_SUCCESS, or ACTIVITY_PERMANENT_FAILURE when the input is
        incomplete or the invalidation request fails.
        """
        try:
            article_id = data['article_id']
            version = data['version']
            run = data['run']
        except Exception as e:
            self.logger.error("Error retrieving basic article data. Data: %s, Exception: %s" % (str(data), str(e)))
            return activity.activity.ACTIVITY_PERMANENT_FAILURE

        try:
            # Fixed start message: it previously read "Starting check for
            # generation of pdf cover." — copy-pasted from another activity.
            self.emit_monitor_event(self.settings, article_id, version, run,
                                    self.pretty_name, "start",
                                    "Starting CloudFront invalidation of the Cdn.")

            ### If we want to run Invalidation only if CDN has been previously populated
            # if "files_in_cdn" in data:
            #     if data["files_in_cdn"] == True:
            #         cloudfront_provider.create_invalidation(article_id)
            #         dashboard_message = "CloudFront Invalidation command sent for article %s." % str(article_id)
            #     else:
            #         dashboard_message = "CloudFront Invalidation was not necessary for article %s." % str(article_id)

            cloudfront_provider.create_invalidation(article_id, self.settings)
            dashboard_message = "CloudFront Invalidation command sent for article %s." % str(article_id)

            self.emit_monitor_event(self.settings, article_id, version, run,
                                    self.pretty_name, "end", dashboard_message)
            return activity.activity.ACTIVITY_SUCCESS

        except Exception as e:
            error_message = str(e)
            self.logger.error(error_message)
            self.emit_monitor_event(self.settings, article_id, version, run,
                                    self.pretty_name, "error", error_message)
            return activity.activity.ACTIVITY_PERMANENT_FAILURE
|
aronysidoro/django-payasyougo
|
payg/account/tests/factory.py
|
import os
import time
import datetime
import random
from django.db import models
from django.conf import settings
from django.test import TestCase, LiveServerTestCase, RequestFactory
from django.test.client import Client
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User, Group
from django.http import Http404
from django.utils import timezone
from model_mommy import mommy
from account.models import AcctStmt, TransType, AcctTrans, Hotel
def _randint(a=10, b=100):
return random.randint(a,b)
def _acct_stmt(hotel, year, month):
    '''
    Create one fake AcctStmt for `hotel` for the given year/month.

    Monkey-patch save() so just generating test data and not based on actual
    usage from Message records.
    '''
    AcctStmt.save = models.Model.save
    stmt_fields = dict(
        hotel=hotel,
        year=year,
        month=month,
        monthly_costs=_randint(),
        total_sms=_randint(),
        balance=_randint(),
    )
    return AcctStmt.objects.create(**stmt_fields)
def make_acct_stmts(hotel):
    """Create one fake AcctStmt per month of 2014 for `hotel`; return them all."""
    statements = []
    for month in range(1, 13):
        statements.append(_acct_stmt(hotel=hotel, year=2014, month=month))
    return statements
def _acct_trans(hotel, trans_type, insert_date, amount=None):
    '''
    Create one fake AcctTrans for `hotel`.

    Monkey-patch save() for same reason as AcctStmt.

    When `amount` is None it is randomized: 'init_amt'/'recharge_amt' get
    1000 Stripe Credits; other types get -100 ... -10 (for testing
    purposes). A caller-supplied `amount` is now honored (previously the
    parameter was accepted but unconditionally overwritten).

    Positive `amount` is a `credit`, else a `debit`.
    '''
    AcctTrans.save = models.Model.save

    # credit/debit flags follow the transaction type, as before
    if trans_type.name in ('init_amt', 'recharge_amt'):
        if amount is None:
            amount = _randint(1000, 1000)
        credit, debit = True, False
    else:
        if amount is None:
            amount = _randint(-100, -10)
        credit, debit = False, True

    return AcctTrans.objects.create(
        hotel=hotel,
        trans_type=trans_type,
        amount=amount,
        sms_used=_randint(),
        insert_date=insert_date,
        debit=debit,
        credit=credit
    )
def make_acct_trans(hotel):
    '''
    Generate AcctTrans records for `hotel` covering the last 30 days and
    return the full AcctTrans queryset.

    TransType: use get() b/c tests using `fixtures`.
    So, generate all transaction records until current date and test format.
    This factory method can be used to manually test AcctTransDetailView template
    w/ ./manage.py runserver
    '''
    # datetime bookkeeping: treat the hotel as created 30 days ago
    next_day = datetime.timedelta(days=1)
    today = datetime.date.today()
    last_month = today + datetime.timedelta(days=-30)

    # TransType fixture rows
    init_amt = TransType.objects.get(name='init_amt')
    recharge_amt = TransType.objects.get(name='recharge_amt')
    sms_used = TransType.objects.get(name='sms_used')

    # set the Hotel as Created 1 month ago
    hotel.created = last_month
    hotel.save()

    # opening credit
    _acct_trans(hotel=hotel, trans_type=init_amt, insert_date=hotel.created)

    # Daily usage until `today`; recharge whenever the balance dips below 0.
    # (A dead pre-loop balance query and unused `trans` bindings were removed.)
    insert_date = hotel.created
    while insert_date < today:
        _acct_trans(hotel=hotel, trans_type=sms_used, insert_date=insert_date)
        balance = AcctTrans.objects.filter(hotel=hotel).balance()
        if balance < 0:
            _acct_trans(hotel=hotel, trans_type=recharge_amt, insert_date=insert_date)
        insert_date += next_day

    return AcctTrans.objects.all()
|
tylerturk/beeswithmachineguns
|
beeswithmachineguns/main.py
|
#!/bin/env python
"""
The MIT License
Copyright (c) 2010 The Chicago Tribune & Contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import bees
from urlparse import urlparse
from optparse import OptionParser, OptionGroup
def parse_options():
    """
    Handle the command line arguments for spinning up bees

    Builds the optparse parser ("up" and "attack" option groups), parses
    argv, and dispatches to bees.up / bees.attack / bees.down / bees.report
    based on the first positional argument.
    """
    parser = OptionParser(usage="""
bees COMMAND [options]
Bees with Machine Guns
A utility for arming (creating) many bees (small EC2 instances) to attack
(load test) targets (web applications).
commands:
  up      Start a batch of load testing servers.
  attack  Begin the attack on a specific url.
  down    Shutdown and deactivate the load testing servers.
  report  Report the status of the load testing servers.
    """)
    up_group = OptionGroup(parser, "up",
                           """In order to spin up new servers you will need to specify at least the -k command, which is the name of the EC2 keypair to use for creating and connecting to the new servers. The bees will expect to find a .pem file with this name in ~/.ssh/. Alternatively, bees can use SSH Agent for the key.""")
    # Required
    up_group.add_option('-k', '--key',  metavar="KEY",  nargs=1,
                        action='store', dest='key', type='string',
                        help="The ssh key pair name to use to connect to the new servers.")
    up_group.add_option('-s', '--servers', metavar="SERVERS", nargs=1,
                        action='store', dest='servers', type='int', default=5,
                        help="The number of servers to start (default: 5).")
    up_group.add_option('-g', '--group', metavar="GROUP", nargs=1,
                        action='store', dest='group', type='string', default='default',
                        help="The security group(s) to run the instances under (default: default).")
    up_group.add_option('-z', '--zone',  metavar="ZONE",  nargs=1,
                        action='store', dest='zone', type='string', default='us-east-1d',
                        help="The availability zone to start the instances in (default: us-east-1d).")
    up_group.add_option('-i', '--instance',  metavar="INSTANCE",  nargs=1,
                        action='store', dest='instance', type='string', default='ami-ff17fb96',
                        help="The instance-id to use for each server from (default: ami-ff17fb96).")
    up_group.add_option('-t', '--type',  metavar="TYPE",  nargs=1,
                        action='store', dest='type', type='string', default='t1.micro',
                        help="The instance-type to use for each server (default: t1.micro).")
    up_group.add_option('-l', '--login',  metavar="LOGIN",  nargs=1,
                        action='store', dest='login', type='string', default='newsapps',
                        help="The ssh username name to use to connect to the new servers (default: newsapps).")
    up_group.add_option('-v', '--subnet',  metavar="SUBNET",  nargs=1,
                        action='store', dest='subnet', type='string', default=None,
                        help="The vpc subnet id in which the instances should be launched. (default: None).")
    parser.add_option_group(up_group)

    attack_group = OptionGroup(parser, "attack",
                               """Beginning an attack requires only that you specify the -u option with the URL you wish to target.""")
    # Required
    attack_group.add_option('-u', '--url', metavar="URL", nargs=1,
                            action='store', dest='url', type='string',
                            help="URL of the target to attack.")
    # NOTE(review): nargs=0 with action='store' and a string type is an odd
    # optparse combination — confirm -K behaves as a flag as intended
    # (action='store_true' would be the conventional form).
    attack_group.add_option('-K', '--keepalive', metavar="KEEP_ALIVE", nargs=0,
                            action='store', dest='keep_alive', type='string', default=False,
                            help="Keep-Alive connection.")
    attack_group.add_option('-p', '--post-file',  metavar="POST_FILE",  nargs=1,
                            action='store', dest='post_file', type='string', default=False,
                            help="The POST file to deliver with the bee's payload.")
    attack_group.add_option('-m', '--mime-type',  metavar="MIME_TYPE",  nargs=1,
                            action='store', dest='mime_type', type='string', default='text/plain',
                            help="The MIME type to send with the request.")
    attack_group.add_option('-n', '--number', metavar="NUMBER", nargs=1,
                            action='store', dest='number', type='int', default=1000,
                            help="The number of total connections to make to the target (default: 1000).")
    attack_group.add_option('-C', '--cookies', metavar="COOKIES", nargs=1, action='store', dest='cookies',
                            type='string', default='',
                            help='Cookies to send during http requests. The cookies should be passed using standard cookie formatting, separated by semi-colons and assigned with equals signs.')
    attack_group.add_option('-c', '--concurrent', metavar="CONCURRENT", nargs=1,
                            action='store', dest='concurrent', type='int', default=100,
                            help="The number of concurrent connections to make to the target (default: 100).")
    attack_group.add_option('-H', '--headers', metavar="HEADERS", nargs=1,
                            action='store', dest='headers', type='string', default='',
                            help="HTTP headers to send to the target to attack. Multiple headers should be separated by semi-colons, e.g header1:value1;header2:value2")
    attack_group.add_option('-e', '--csv', metavar="FILENAME", nargs=1,
                            action='store', dest='csv_filename', type='string', default='',
                            help="Store the distribution of results in a csv file for all completed bees (default: '').")
    # Optional
    attack_group.add_option('-T', '--tpr', metavar='TPR', nargs=1, action='store', dest='tpr', default=None, type='float',
                            help='The upper bounds for time per request. If this option is passed and the target is below the value a 1 will be returned with the report details (default: None).')
    attack_group.add_option('-R', '--rps', metavar='RPS', nargs=1, action='store', dest='rps', default=None, type='float',
                            help='The lower bounds for request per second. If this option is passed and the target is above the value a 1 will be returned with the report details (default: None).')
    attack_group.add_option('-A', '--basic_auth', metavar='basic_auth', nargs=1, action='store', dest='basic_auth', default='', type='string',
                            help='BASIC authentication credentials, format auth-username:password (default: None).')

    parser.add_option_group(attack_group)

    (options, args) = parser.parse_args()

    if len(args) <= 0:
        parser.error('Please enter a command.')

    # First positional argument selects the subcommand.
    command = args[0]

    if command == 'up':
        if not options.key:
            parser.error('To spin up new instances you need to specify a key-pair name with -k')

        if options.group == 'default':
            print 'New bees will use the "default" EC2 security group. Please note that port 22 (SSH) is not normally open on this group. You will need to use to the EC2 tools to open it before you will be able to attack.'

        bees.up(options.servers, options.group, options.zone, options.instance, options.type, options.login, options.key, options.subnet)
    elif command == 'attack':
        if not options.url:
            parser.error('To run an attack you need to specify a url with -u')

        # Normalize the target URL: ensure it has a scheme and a path.
        parsed = urlparse(options.url)
        if "/" not in parsed.path:
            if not parsed.scheme:
                parsed = urlparse("http://" + options.url + "/")
            else:
                parsed = urlparse(options.url + "/")
        if not parsed.scheme:
            parsed = urlparse("http://" + options.url)

        additional_options = dict(
            cookies=options.cookies,
            headers=options.headers,
            post_file=options.post_file,
            keep_alive=options.keep_alive,
            mime_type=options.mime_type,
            csv_filename=options.csv_filename,
            tpr=options.tpr,
            rps=options.rps,
            basic_auth=options.basic_auth
        )

        bees.attack(options.url, options.number, options.concurrent, **additional_options)
    elif command == 'down':
        bees.down()
    elif command == 'report':
        bees.report()
def main():
    """Console entry point: parse CLI arguments and dispatch the command."""
    parse_options()
|
jgaul/python-oauth2
|
oauth2/__init__.py
|
"""
The MIT License
Copyright (c) 2007-2010 Leah Culver, Joe Stump, Mark Paschal, Vic Fryzel
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import base64
import urllib
import time
import random
import urlparse
import hmac
import binascii
import httplib2
try:
from urlparse import parse_qs
parse_qs # placate pyflakes
except ImportError:
# fall back for Python 2.5
from cgi import parse_qs
try:
from hashlib import sha1
sha = sha1
except ImportError:
# hashlib was added in Python 2.5
import sha
import _version
__version__ = _version.__version__
OAUTH_VERSION = '1.0' # Hi Blaine!
HTTP_METHOD = 'GET'
SIGNATURE_METHOD = 'PLAINTEXT'
class Error(RuntimeError):
    """Generic exception class."""

    def __init__(self, message='OAuth error occurred.'):
        # Stored privately; exposed via the `message` property below.
        self._message = message

    @property
    def message(self):
        """A hack to get around the deprecation errors in 2.6."""
        return self._message

    def __str__(self):
        return self.message
class MissingSignature(Error):
    """Error subclass indicating a missing OAuth signature."""
    pass
def build_authenticate_header(realm=''):
    """Optional WWW-Authenticate header (401 error)"""
    header_value = 'OAuth realm="%s"' % (realm,)
    return {'WWW-Authenticate': header_value}
def build_xoauth_string(url, consumer, token=None):
    """Build an XOAUTH string for use in SMTP/IMAP authentication."""
    request = Request.from_consumer_and_token(consumer, token, "GET", url)
    request.sign_request(SignatureMethod_HMAC_SHA1(), consumer, token)

    params = ['%s="%s"' % (k, escape(v))
              for k, v in sorted(request.iteritems())
              if v is not None]
    return "GET %s %s" % (url, ','.join(params))
def to_unicode(s):
""" Convert to unicode, raise exception with instructive error
message if s is not unicode, ascii, or utf-8. """
if not isinstance(s, unicode):
if not isinstance(s, str):
raise TypeError('You are required to pass either unicode or string here, not: %r (%s)' % (type(s), s))
try:
s = s.decode('utf-8')
except UnicodeDecodeError, le:
raise TypeError('You are required to pass either a unicode object or a utf-8 string here. You passed a Python string object which contained non-utf-8: %r. The UnicodeDecodeError that resulted from attempting to interpret it as utf-8 was: %s' % (s, le,))
return s
def to_utf8(s):
    # Encode to a utf-8 byte string; to_unicode() raises TypeError for
    # anything that is not unicode or a utf-8 str.
    return to_unicode(s).encode('utf-8')
def to_unicode_if_string(s):
    """Convert string inputs to unicode; pass anything else through."""
    return to_unicode(s) if isinstance(s, basestring) else s
def to_utf8_if_string(s):
    """Convert string inputs to utf-8 bytes; pass anything else through."""
    return to_utf8(s) if isinstance(s, basestring) else s
def to_unicode_optional_iterator(x):
"""
Raise TypeError if x is a str containing non-utf8 bytes or if x is
an iterable which contains such a str.
"""
if isinstance(x, basestring):
return to_unicode(x)
try:
l = list(x)
except TypeError, e:
assert 'is not iterable' in str(e)
return x
else:
return [ to_unicode(e) for e in l ]
def to_utf8_optional_iterator(x):
"""
Raise TypeError if x is a str or if x is an iterable which
contains a str.
"""
if isinstance(x, basestring):
return to_utf8(x)
try:
l = list(x)
except TypeError, e:
assert 'is not iterable' in str(e)
return x
else:
return [ to_utf8_if_string(e) for e in l ]
def escape(s):
    """Escape a URL including any /."""
    # safe='~' overrides urllib.quote's default safe='/', so '/' is
    # percent-encoded while '~' (unreserved in OAuth) is preserved.
    return urllib.quote(s.encode('utf-8'), safe='~')
def generate_timestamp():
    """Get seconds since epoch (UTC) as an int, for oauth_timestamp."""
    return int(time.time())
def generate_nonce(length=8):
    """Return a pseudorandom string of *length* decimal digits."""
    digits = [str(random.randint(0, 9)) for _ in range(length)]
    return ''.join(digits)
def generate_verifier(length=8):
    """Return a pseudorandom string of *length* decimal digits."""
    digits = [str(random.randint(0, 9)) for _ in range(length)]
    return ''.join(digits)
class Consumer(object):
    """A consumer ("third-party" client) of OAuth-protected services.

    A consumer is registered with the service provider and receives a
    *key* and a *secret*: the key identifies the consumer on every
    request, while the secret is used only when signing requests, proving
    the request comes from that registered consumer. These credentials
    are then used to ask the provider for a request token, kicking off
    the OAuth authorization process.
    """

    key = None
    secret = None

    def __init__(self, key, secret):
        if key is None or secret is None:
            raise ValueError("Key and secret must be set.")
        self.key = key
        self.secret = secret

    def __str__(self):
        return urllib.urlencode({'oauth_consumer_key': self.key,
                                 'oauth_consumer_secret': self.secret})
class Token(object):
    """An OAuth credential used to request authorization or a protected
    resource.

    Tokens in OAuth comprise a *key* and a *secret*. The key is included in
    requests to identify the token being used, but the secret is used only in
    the signature, to prove that the requester is who the server gave the
    token to.

    When first negotiating the authorization, the consumer asks for a *request
    token* that the live user authorizes with the service provider. The
    consumer then exchanges the request token for an *access token* that can
    be used to access protected resources.
    """

    key = None
    secret = None
    callback = None              # optional URL to send the user back to
    callback_confirmed = None    # 'true' once set_callback() was called (1.0a)
    verifier = None              # OAuth 1.0a verification code

    def __init__(self, key, secret):
        self.key = key
        self.secret = secret

        if self.key is None or self.secret is None:
            raise ValueError("Key and secret must be set.")

    def set_callback(self, callback):
        """Record the callback URL and mark it confirmed (OAuth 1.0a)."""
        self.callback = callback
        self.callback_confirmed = 'true'

    def set_verifier(self, verifier=None):
        """Store *verifier*, or generate a pseudorandom one when omitted."""
        if verifier is not None:
            self.verifier = verifier
        else:
            self.verifier = generate_verifier()

    def get_callback_url(self):
        """Return the callback URL with oauth_verifier appended to its
        query string; returns the callback unchanged when no verifier is
        set (or None when no callback is set)."""
        if self.callback and self.verifier:
            # Append the oauth_verifier.
            parts = urlparse.urlparse(self.callback)
            scheme, netloc, path, params, query, fragment = parts[:6]
            if query:
                query = '%s&oauth_verifier=%s' % (query, self.verifier)
            else:
                query = 'oauth_verifier=%s' % self.verifier
            return urlparse.urlunparse((scheme, netloc, path, params,
                query, fragment))
        return self.callback

    def to_string(self):
        """Returns this token as a plain string, suitable for storage.

        The resulting string includes the token's secret, so you should never
        send or store this string where a third party can read it.
        """
        data = {
            'oauth_token': self.key,
            'oauth_token_secret': self.secret,
        }
        if self.callback_confirmed is not None:
            data['oauth_callback_confirmed'] = self.callback_confirmed
        return urllib.urlencode(data)

    @staticmethod
    def from_string(s):
        """Deserializes a token from a string like one returned by
        `to_string()`. Raises ValueError when the string is empty or is
        missing oauth_token / oauth_token_secret."""
        if not len(s):
            raise ValueError("Invalid parameter string.")

        params = parse_qs(s, keep_blank_values=False)
        if not len(params):
            raise ValueError("Invalid parameter string.")

        try:
            key = params['oauth_token'][0]
        except Exception:
            raise ValueError("'oauth_token' not found in OAuth request.")

        try:
            secret = params['oauth_token_secret'][0]
        except Exception:
            raise ValueError("'oauth_token_secret' not found in "
                "OAuth request.")

        token = Token(key, secret)
        try:
            token.callback_confirmed = params['oauth_callback_confirmed'][0]
        except KeyError:
            pass  # 1.0, no callback confirmed.
        return token

    def __str__(self):
        return self.to_string()
def setter(attr):
    """Property factory: turn the decorated method into the SETTER of a
    property with the same name.

    The generated getter reads the value from the instance __dict__
    (raising AttributeError when unset) and the generated deleter removes
    it; the decorated function itself becomes the setter.
    """
    name = attr.__name__

    def getter(self):
        try:
            return self.__dict__[name]
        except KeyError:
            raise AttributeError(name)

    def deleter(self):
        del self.__dict__[name]

    return property(getter, attr, deleter)
class Request(dict):
    """The parameters and information for an HTTP request, suitable for
    authorizing with OAuth credentials.

    When a consumer wants to access a service's protected resources, it does
    so using a signed HTTP request identifying itself (the consumer) with its
    key, and providing an access token authorized by the end user to access
    those resources.

    The dict portion of the instance holds the (OAuth and non-OAuth)
    request parameters; `url`, `method`, `body` and `is_form_encoded`
    live in the instance __dict__ via the `setter` property factory.
    """

    version = OAUTH_VERSION

    def __init__(self, method=HTTP_METHOD, url=None, parameters=None,
                 body='', is_form_encoded=False):
        # `url` and `method` pass through the @setter properties below,
        # which normalize the URL and upper-case the method.
        if url is not None:
            self.url = to_unicode(url)
        self.method = method
        if parameters is not None:
            for k, v in parameters.iteritems():
                k = to_unicode(k)
                v = to_unicode_optional_iterator(v)
                self[k] = v
        self.body = body
        self.is_form_encoded = is_form_encoded

    @setter
    def url(self, value):
        """Store the URL and compute `normalized_url` for signing."""
        self.__dict__['url'] = value
        if value is not None:
            scheme, netloc, path, params, query, fragment = urlparse.urlparse(value)

            # Exclude default port numbers.
            if scheme == 'http' and netloc[-3:] == ':80':
                netloc = netloc[:-3]
            elif scheme == 'https' and netloc[-4:] == ':443':
                netloc = netloc[:-4]
            if scheme not in ('http', 'https'):
                raise ValueError("Unsupported URL %s (%s)." % (value, scheme))

            # Normalized URL excludes params, query, and fragment.
            self.normalized_url = urlparse.urlunparse((scheme, netloc, path, None, None, None))
        else:
            self.normalized_url = None
            self.__dict__['url'] = None

    @setter
    def method(self, value):
        # HTTP methods are compared upper-case throughout.
        self.__dict__['method'] = value.upper()

    def _get_timestamp_nonce(self):
        # Both parameters must already be present; KeyError otherwise.
        return self['oauth_timestamp'], self['oauth_nonce']

    def get_nonoauth_parameters(self):
        """Get any non-OAuth parameters."""
        return dict([(k, v) for k, v in self.iteritems()
                    if not k.startswith('oauth_')])

    def to_header(self, realm=''):
        """Serialize as a header for an HTTPAuth request."""
        # Only oauth_* parameters travel in the Authorization header.
        oauth_params = ((k, v) for k, v in self.items()
                            if k.startswith('oauth_'))
        stringy_params = ((k, escape(str(v))) for k, v in oauth_params)
        header_params = ('%s="%s"' % (k, v) for k, v in stringy_params)
        params_header = ', '.join(header_params)

        auth_header = 'OAuth realm="%s"' % realm
        if params_header:
            auth_header = "%s, %s" % (auth_header, params_header)

        return {'Authorization': auth_header}

    def to_postdata(self):
        """Serialize as post data for a POST request."""
        d = {}
        for k, v in self.iteritems():
            d[k.encode('utf-8')] = to_utf8_optional_iterator(v)

        # tell urlencode to deal with sequence values and map them correctly
        # to resulting querystring. for example self["k"] = ["v1", "v2"] will
        # result in 'k=v1&k=v2' and not k=%5B%27v1%27%2C+%27v2%27%5D
        return urllib.urlencode(d, True).replace('+', '%20')

    def to_url(self):
        """Serialize as a URL for a GET request."""
        base_url = urlparse.urlparse(self.url)
        try:
            query = base_url.query
        except AttributeError:
            # must be python <2.5
            query = base_url[4]
        query = parse_qs(query)
        for k, v in self.items():
            query.setdefault(k, []).append(v)
        try:
            scheme = base_url.scheme
            netloc = base_url.netloc
            path = base_url.path
            params = base_url.params
            fragment = base_url.fragment
        except AttributeError:
            # must be python <2.5
            scheme = base_url[0]
            netloc = base_url[1]
            path = base_url[2]
            params = base_url[3]
            fragment = base_url[5]
        url = (scheme, netloc, path, params,
               urllib.urlencode(query, True), fragment)
        return urlparse.urlunparse(url)

    def get_parameter(self, parameter):
        """Return the named parameter; raises Error when absent."""
        ret = self.get(parameter)
        if ret is None:
            raise Error('Parameter not found: %s' % parameter)
        return ret

    def get_normalized_parameters(self):
        """Return a string that contains the parameters that must be signed."""
        items = []
        for key, value in self.iteritems():
            if key == 'oauth_signature':
                continue
            # 1.0a/9.1.1 states that kvp must be sorted by key, then by value,
            # so we unpack sequence values into multiple items for sorting.
            if isinstance(value, basestring):
                items.append((to_utf8_if_string(key), to_utf8(value)))
            else:
                try:
                    value = list(value)
                except TypeError, e:
                    assert 'is not iterable' in str(e)
                    items.append((to_utf8_if_string(key), to_utf8_if_string(value)))
                else:
                    items.extend((to_utf8_if_string(key), to_utf8_if_string(item)) for item in value)

        # Include any query string parameters from the provided URL
        query = urlparse.urlparse(self.url)[4]
        url_items = self._split_url_string(query).items()
        url_items = [(to_utf8(k), to_utf8(v)) for k, v in url_items if k != 'oauth_signature' ]
        items.extend(url_items)

        items.sort()
        encoded_str = urllib.urlencode(items)
        # Encode signature parameters per Oauth Core 1.0 protocol
        # spec draft 7, section 3.6
        # (http://tools.ietf.org/html/draft-hammer-oauth-07#section-3.6)
        # Spaces must be encoded with "%20" instead of "+"
        return encoded_str.replace('+', '%20').replace('%7E', '~')

    def sign_request(self, signature_method, consumer, token):
        """Set the signature parameter to the result of sign."""
        if not self.is_form_encoded:
            # according to
            # http://oauth.googlecode.com/svn/spec/ext/body_hash/1.0/oauth-bodyhash.html
            # section 4.1.1 "OAuth Consumers MUST NOT include an
            # oauth_body_hash parameter on requests with form-encoded
            # request bodies."
            self['oauth_body_hash'] = base64.b64encode(sha(self.body).digest())

        if 'oauth_consumer_key' not in self:
            self['oauth_consumer_key'] = consumer.key

        if token and 'oauth_token' not in self:
            self['oauth_token'] = token.key

        self['oauth_signature_method'] = signature_method.name
        self['oauth_signature'] = signature_method.sign(self, consumer, token)

    @classmethod
    def make_timestamp(cls):
        """Get seconds since epoch (UTC)."""
        return str(int(time.time()))

    @classmethod
    def make_nonce(cls):
        """Generate pseudorandom number."""
        return str(random.randint(0, 100000000))

    @classmethod
    def from_request(cls, http_method, http_url, headers=None, parameters=None,
            query_string=None):
        """Combines multiple parameter sources."""
        if parameters is None:
            parameters = {}

        # Headers
        if headers and 'Authorization' in headers:
            auth_header = headers['Authorization']
            # Check that the authorization header is OAuth.
            if auth_header[:6] == 'OAuth ':
                auth_header = auth_header[6:]
                try:
                    # Get the parameters from the header.
                    header_params = cls._split_header(auth_header)
                    parameters.update(header_params)
                except:
                    raise Error('Unable to parse OAuth parameters from '
                        'Authorization header.')

        # GET or POST query string.
        if query_string:
            query_params = cls._split_url_string(query_string)
            parameters.update(query_params)

        # URL parameters.
        param_str = urlparse.urlparse(http_url)[4]  # query
        url_params = cls._split_url_string(param_str)
        parameters.update(url_params)

        if parameters:
            return cls(http_method, http_url, parameters)

        return None

    @classmethod
    def from_consumer_and_token(cls, consumer, token=None,
            http_method=HTTP_METHOD, http_url=None, parameters=None,
            body='', is_form_encoded=False):
        """Build a Request carrying the standard oauth_* defaults
        (consumer key, timestamp, nonce, version, and token/verifier
        when a token is given). Caller-supplied parameters win over the
        defaults."""
        if not parameters:
            parameters = {}

        defaults = {
            'oauth_consumer_key': consumer.key,
            'oauth_timestamp': cls.make_timestamp(),
            'oauth_nonce': cls.make_nonce(),
            'oauth_version': cls.version,
        }

        defaults.update(parameters)
        parameters = defaults

        if token:
            parameters['oauth_token'] = token.key
            if token.verifier:
                parameters['oauth_verifier'] = token.verifier

        return Request(http_method, http_url, parameters, body=body,
                       is_form_encoded=is_form_encoded)

    @classmethod
    def from_token_and_callback(cls, token, callback=None,
            http_method=HTTP_METHOD, http_url=None, parameters=None):
        """Build a Request carrying the token key and optional callback."""
        if not parameters:
            parameters = {}

        parameters['oauth_token'] = token.key

        if callback:
            parameters['oauth_callback'] = callback

        return cls(http_method, http_url, parameters)

    @staticmethod
    def _split_header(header):
        """Turn Authorization: header into parameters."""
        params = {}
        parts = header.split(',')
        for param in parts:
            # Ignore realm parameter.
            if param.find('realm') > -1:
                continue
            # Remove whitespace.
            param = param.strip()
            # Split key-value.
            param_parts = param.split('=', 1)
            # Remove quotes and unescape the value.
            params[param_parts[0]] = urllib.unquote(param_parts[1].strip('\"'))
        return params

    @staticmethod
    def _split_url_string(param_str):
        """Turn URL string into parameters."""
        parameters = parse_qs(param_str.encode('utf-8'), keep_blank_values=True)
        for k, v in parameters.iteritems():
            # parse_qs returns lists; keep only the first value.
            parameters[k] = urllib.unquote(v[0])
        return parameters
class Client(httplib2.Http):
    """OAuthClient is a worker to attempt to execute a request.

    An httplib2.Http subclass that signs every request with the given
    consumer (and optional token) before sending it.
    """

    def __init__(self, consumer, token=None, cache=None, timeout=None,
        proxy_info=None):

        if consumer is not None and not isinstance(consumer, Consumer):
            raise ValueError("Invalid consumer.")

        if token is not None and not isinstance(token, Token):
            raise ValueError("Invalid token.")

        self.consumer = consumer
        self.token = token
        # HMAC-SHA1 by default; see set_signature_method().
        self.method = SignatureMethod_HMAC_SHA1()

        httplib2.Http.__init__(self, cache=cache, timeout=timeout, proxy_info=proxy_info)

    def set_signature_method(self, method):
        """Replace the default HMAC-SHA1 signing method."""
        if not isinstance(method, SignatureMethod):
            raise ValueError("Invalid signature method.")

        self.method = method

    def request(self, uri, method="GET", body='', headers=None,
        redirections=httplib2.DEFAULT_MAX_REDIRECTS, connection_type=None,
        realm=None):
        """Sign the request and delegate to httplib2.Http.request.

        Signed OAuth parameters travel in the POST body for form-encoded
        requests, in the query string for GETs, and in the Authorization
        header otherwise.
        """
        DEFAULT_POST_CONTENT_TYPE = 'application/x-www-form-urlencoded'

        if not isinstance(headers, dict):
            headers = {}

        if method == "POST":
            headers['Content-Type'] = headers.get('Content-Type',
                DEFAULT_POST_CONTENT_TYPE)

        is_form_encoded = \
            headers.get('Content-Type') == 'application/x-www-form-urlencoded'

        if is_form_encoded and body:
            parameters = parse_qs(body)
        else:
            parameters = None

        req = Request.from_consumer_and_token(self.consumer,
            token=self.token, http_method=method, http_url=uri,
            parameters=parameters, body=body, is_form_encoded=is_form_encoded)

        req.sign_request(self.method, self.consumer, self.token)

        # Default realm is "scheme://host" of the request URI.
        schema, rest = urllib.splittype(uri)
        if rest.startswith('//'):
            hierpart = '//'
        else:
            hierpart = ''
        host, rest = urllib.splithost(rest)

        realm = realm if realm != None else schema + ':' + hierpart + host

        if is_form_encoded:
            body = req.to_postdata()
        elif method == "GET":
            uri = req.to_url()
        else:
            headers.update(req.to_header(realm=realm))

        return httplib2.Http.request(self, uri, method=method, body=body,
            headers=headers, redirections=redirections,
            connection_type=connection_type)
class Server(object):
    """A skeletal implementation of a service provider, providing protected
    resources to requests from authorized consumers.

    This class implements the logic to check requests for authorization. You
    can use it with your web server or web framework to protect certain
    resources with OAuth.
    """

    timestamp_threshold = 300  # In seconds, five minutes.
    version = OAUTH_VERSION
    signature_methods = None

    def __init__(self, signature_methods=None):
        self.signature_methods = signature_methods or {}

    def add_signature_method(self, signature_method):
        """Register a SignatureMethod under its name; returns the registry."""
        self.signature_methods[signature_method.name] = signature_method
        return self.signature_methods

    def verify_request(self, request, consumer, token):
        """Verifies an api call and checks all the parameters.

        Raises Error/MissingSignature on a bad version, stale timestamp or
        invalid signature; returns the non-OAuth parameters otherwise.
        """
        self._check_version(request)
        self._check_signature(request, consumer, token)
        parameters = request.get_nonoauth_parameters()
        return parameters

    def build_authenticate_header(self, realm=''):
        """Optional support for the authenticate header."""
        return {'WWW-Authenticate': 'OAuth realm="%s"' % realm}

    def _check_version(self, request):
        """Verify the correct version of the request for this server."""
        version = self._get_version(request)
        if version and version != self.version:
            raise Error('OAuth version %s not supported.' % str(version))

    def _get_version(self, request):
        """Return the version of the request for this server.

        Defaults to OAUTH_VERSION when the parameter is absent.
        """
        try:
            version = request.get_parameter('oauth_version')
        except Error:
            # Narrowed from a bare `except:`: get_parameter raises Error
            # when the parameter is missing; a bare clause also swallowed
            # KeyboardInterrupt/SystemExit.
            version = OAUTH_VERSION
        return version

    def _get_signature_method(self, request):
        """Figure out the signature with some defaults."""
        try:
            signature_method = request.get_parameter('oauth_signature_method')
        except Error:
            signature_method = SIGNATURE_METHOD

        try:
            # Get the signature method object.
            signature_method = self.signature_methods[signature_method]
        except KeyError:
            # Narrowed from a bare `except:`: only a missing registry entry
            # should produce the "not supported" error.
            signature_method_names = ', '.join(self.signature_methods.keys())
            raise Error('Signature method %s not supported try one of the following: %s' % (signature_method, signature_method_names))

        return signature_method

    def _get_verifier(self, request):
        return request.get_parameter('oauth_verifier')

    def _check_signature(self, request, consumer, token):
        """Validate the request's oauth_signature (and its timestamp)."""
        timestamp, nonce = request._get_timestamp_nonce()
        self._check_timestamp(timestamp)
        signature_method = self._get_signature_method(request)

        try:
            signature = request.get_parameter('oauth_signature')
        except Error:
            raise MissingSignature('Missing oauth_signature.')

        # Validate the signature.
        valid = signature_method.check(request, consumer, token, signature)
        if not valid:
            key, base = signature_method.signing_base(request, consumer, token)
            raise Error('Invalid signature. Expected signature base '
                'string: %s' % base)

    def _check_timestamp(self, timestamp):
        """Verify that timestamp is recentish."""
        timestamp = int(timestamp)
        now = int(time.time())
        lapsed = now - timestamp
        if lapsed > self.timestamp_threshold:
            raise Error('Expired timestamp: given %d and now %s has a '
                'greater difference than threshold %d' % (timestamp, now,
                    self.timestamp_threshold))
class SignatureMethod(object):
    """Abstract base for OAuth request-signing strategies.

    The OAuth protocol lets consumers and service providers pick how
    requests are signed. Subclass this and implement `signing_base()`
    and `sign()` to provide a new signing method.
    """

    def signing_base(self, request, consumer, token):
        """Calculate the string that needs to be signed.

        Returns a 2-tuple of (signing key, message to sign); the message
        may be shown in error output to help clients debug.
        """
        raise NotImplementedError

    def sign(self, request, consumer, token):
        """Return the signature for *request* using the given consumer and
        token. Implementations should build the message via signing_base()
        so errors remain debuggable."""
        raise NotImplementedError

    def check(self, request, consumer, token, signature):
        """Return True when *signature* matches the one this method would
        produce for the request."""
        expected = self.sign(request, consumer, token)
        return expected == signature
class SignatureMethod_HMAC_SHA1(SignatureMethod):
    """HMAC-SHA1 signature method."""

    name = 'HMAC-SHA1'

    def signing_base(self, request, consumer, token):
        """Return (key, raw): the HMAC key built from the escaped consumer
        and token secrets, and the signature base string assembled from the
        method, normalized URL and normalized parameters."""
        if not hasattr(request, 'normalized_url') or request.normalized_url is None:
            raise ValueError("Base URL for request is not set.")

        sig = (
            escape(request.method),
            escape(request.normalized_url),
            escape(request.get_normalized_parameters()),
        )

        # Key is "<consumer_secret>&<token_secret>"; the token part stays
        # empty when no token is supplied.
        key = '%s&' % escape(consumer.secret)
        if token:
            key += escape(token.secret)
        raw = '&'.join(sig)
        return key, raw

    def sign(self, request, consumer, token):
        """Builds the base signature string."""
        key, raw = self.signing_base(request, consumer, token)

        hashed = hmac.new(key, raw, sha)

        # Calculate the digest base 64.
        # [:-1] strips the trailing newline b2a_base64 appends.
        return binascii.b2a_base64(hashed.digest())[:-1]
class SignatureMethod_PLAINTEXT(SignatureMethod):
    """PLAINTEXT signature method: the signature is simply the escaped
    consumer secret joined to the escaped token secret with '&'."""

    name = 'PLAINTEXT'

    def signing_base(self, request, consumer, token):
        """Return (key, raw); for PLAINTEXT both are the same string."""
        parts = [escape(consumer.secret), '']
        if token:
            parts[1] = escape(token.secret)
        sig = '&'.join(parts)
        return sig, sig

    def sign(self, request, consumer, token):
        key, raw = self.signing_base(request, consumer, token)
        return raw
|
LukeB42/Emissary
|
emissary/controllers/parser.py
|
# This file implements routines for extracting links from response objects.
import re
import lxml
import urlparse
import feedparser
# We have sought to disperse power, to set men and women free.
# That really means: to help them to discover that they are free.
# Everybody's free. The slave is free.
# The ultimate weapon isn't this plague out in Vegas, or any new super H-bomb.
# The ultimate weapon has always existed. Every man, every woman, and every child owns it.
# It's the ability to say No and take the consequences.
# 'Fear is failure.' 'The fear of death is the beginning of slavery.'
# "Thou hast no right but to do thy will.'
# The goose can break the bottle at any second.
# Socrates took the hemlock to prove it.
# Jesus went to the cross to prove it.
# It's in all history, all myth, all poetry.
# It's right out in the open all the time."
from goose import Goose
def extract_links(response):
    """Return links found in *response*: feed entries (as {link: title}
    dicts) for XML payloads, otherwise anchors scraped from the HTML."""
    headers = response.headers
    if ('content-type' in headers.keys()) and ('xml' in headers['content-type']):
        feed = feedparser.parse(response.text)
        return [{entry.link: entry.title} for entry in feed.entries]
    # The following is a highly experimental feature.
    parsed = urlparse.urlparse(response.url)
    root = parsed.scheme + "://" + parsed.netloc
    return Parser(response.text, url=root).parse()
class Parser(object):
    """
    Build a list of relevant links from an HTML string and the root URL.

        p = Parser(html_text, root_url)
        urls = p.parse()
    """

    def __init__(self, html=None, doc=None, url=None):
        """Store the raw html / parsed doc and the netloc of *url* (falling
        back to the raw value when it cannot be parsed)."""
        self.html = html
        self.doc = doc
        try:
            self.url = urlparse.urlparse(url).netloc
        except Exception:
            # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
            # are not swallowed.
            self.url = url
        self.links = []

    def root_to_urls(self, doc, titles):
        """
        Return a list of urls from an lxml root.
        """
        if doc is None:
            return []
        a_tags = doc.xpath('//a')
        # tries to find titles of link elements via tag text
        if titles:
            return [(a.get('href'), a.text) for a in a_tags if a.get('href')]
        return [a.get('href') for a in a_tags if a.get('href')]

    def get_urls(self, _input=None, titles=False, regex=False):
        """Extract URLs from *_input* (or self.html when omitted).

        With regex=True, returns http(s) URLs matched directly in the text.
        Otherwise parses the input as HTML and returns anchor hrefs
        (optionally paired with their titles).

        Bug fixes vs. the original: the regex branch used to discard the
        re.findall() result, strip the *characters* of the input, and
        return the input itself; the non-regex branch raised NameError
        (undefined `text`) for already-parsed documents.
        """
        if (not _input) and (not self.html):
            return []
        if not _input:
            _input = self.html
        if regex:
            found = re.findall(
                'http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+',
                _input)
            return [u.strip() for u in found]
        if isinstance(_input, str) or isinstance(_input, unicode):
            doc = self.fromstring(_input)
        else:
            doc = _input  # assume an already-parsed lxml document
        return self.root_to_urls(doc, titles)

    def fromstring(self, html):
        """Parse *html* into an lxml document; returns None on failure."""
        try:
            self.doc = lxml.html.fromstring(html)
        except Exception:
            return None
        return self.doc

    def parse(self, html=None, url=None):
        """
        Whittle a list of urls into things we're interested in.

        Drops fragments and self-references, and resolves relative links
        against *url* (or the URL given at construction time).
        """
        if self.links:
            self.links = []
        urls = self.get_urls(html)
        if not urls:
            return urls
        urls = set(urls)
        if url:
            url = "http://%s/" % urlparse.urlparse(url).netloc
        for u in urls:
            if url and u == url:
                continue
            if self.url and u == self.url:
                continue
            if u.startswith('#'):
                continue
            if not u.startswith('http'):
                # Resolve relative links against the known root, avoiding a
                # doubled slash at the join point.
                if url:
                    if (url[-1] == '/') and (u[0] == '/'):
                        u = url + u[1:]
                    else:
                        u = url + u
                elif self.url:
                    if (self.url[-1] == '/') and (u[0] == '/'):
                        u = self.url + u[1:]
                    else:
                        u = self.url + u
                else:
                    continue
            self.links.append(u)
        return self.links
def extract_body(html):
    """
    Extract the body text of a web page
    """
    extractor = Goose({'enable_image_fetching': False})
    return extractor.extract(raw_html=html).cleaned_text
def extract_title(html):
    """
    Extract the body title of a web page
    """
    extractor = Goose({'enable_image_fetching': False})
    return extractor.extract(raw_html=html).title
def summarise(article):
    """Return roughly the first three sentences of *article*.

    Words are scanned until the third one ending in '.'; everything up to
    and including it is returned. Articles with fewer than three
    sentence-ending words are returned whole (the original returned an
    empty string for those).
    """
    words = article.split()
    stopnum = len(words)  # fall back to the whole text
    seen = 0
    for i, word in enumerate(words):
        if word.endswith('.'):
            seen += 1
            if seen == 3:
                stopnum = i + 1
                break
    return ' '.join(words[:stopnum])
|
talon-one/talon_one.py
|
talon_one/models/account_additional_cost.py
|
# coding: utf-8
"""
Talon.One API
The Talon.One API is used to manage applications and campaigns, as well as to integrate with your application. The operations in the _Integration API_ section are used to integrate with our platform, while the other operations are used to manage applications and campaigns. ### Where is the API? The API is available at the same hostname as these docs. For example, if you are reading this page at `https://mycompany.talon.one/docs/api/`, the URL for the [updateCustomerProfile][] operation is `https://mycompany.talon.one/v1/customer_profiles/id` [updateCustomerProfile]: #operation--v1-customer_profiles--integrationId--put # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from talon_one.configuration import Configuration
class AccountAdditionalCost(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'id': 'int',
'created': 'datetime',
'account_id': 'int',
'name': 'str',
'title': 'str',
'description': 'str',
'subscribed_applications_ids': 'list[int]'
}
attribute_map = {
'id': 'id',
'created': 'created',
'account_id': 'accountId',
'name': 'name',
'title': 'title',
'description': 'description',
'subscribed_applications_ids': 'subscribedApplicationsIds'
}
def __init__(self, id=None, created=None, account_id=None, name=None, title=None, description=None, subscribed_applications_ids=None, local_vars_configuration=None): # noqa: E501
"""AccountAdditionalCost - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._id = None
self._created = None
self._account_id = None
self._name = None
self._title = None
self._description = None
self._subscribed_applications_ids = None
self.discriminator = None
self.id = id
self.created = created
self.account_id = account_id
self.name = name
self.title = title
self.description = description
if subscribed_applications_ids is not None:
self.subscribed_applications_ids = subscribed_applications_ids
@property
def id(self):
"""Gets the id of this AccountAdditionalCost. # noqa: E501
Unique ID for this entity. # noqa: E501
:return: The id of this AccountAdditionalCost. # noqa: E501
:rtype: int
"""
return self._id
@id.setter
def id(self, id):
"""Sets the id of this AccountAdditionalCost.
Unique ID for this entity. # noqa: E501
:param id: The id of this AccountAdditionalCost. # noqa: E501
:type: int
"""
if self.local_vars_configuration.client_side_validation and id is None: # noqa: E501
raise ValueError("Invalid value for `id`, must not be `None`") # noqa: E501
self._id = id
@property
def created(self):
"""Gets the created of this AccountAdditionalCost. # noqa: E501
The exact moment this entity was created. # noqa: E501
:return: The created of this AccountAdditionalCost. # noqa: E501
:rtype: datetime
"""
return self._created
@created.setter
def created(self, created):
"""Sets the created of this AccountAdditionalCost.
The exact moment this entity was created. # noqa: E501
:param created: The created of this AccountAdditionalCost. # noqa: E501
:type: datetime
"""
if self.local_vars_configuration.client_side_validation and created is None: # noqa: E501
raise ValueError("Invalid value for `created`, must not be `None`") # noqa: E501
self._created = created
@property
def account_id(self):
"""Gets the account_id of this AccountAdditionalCost. # noqa: E501
The ID of the account that owns this entity. # noqa: E501
:return: The account_id of this AccountAdditionalCost. # noqa: E501
:rtype: int
"""
return self._account_id
@account_id.setter
def account_id(self, account_id):
"""Sets the account_id of this AccountAdditionalCost.
The ID of the account that owns this entity. # noqa: E501
:param account_id: The account_id of this AccountAdditionalCost. # noqa: E501
:type: int
"""
if self.local_vars_configuration.client_side_validation and account_id is None: # noqa: E501
raise ValueError("Invalid value for `account_id`, must not be `None`") # noqa: E501
self._account_id = account_id
@property
def name(self):
"""Gets the name of this AccountAdditionalCost. # noqa: E501
The additional cost name that will be used in API requests and Talang. E.g. if `name == \"shipping\"` then you would set the shipping additional cost by including an `additionalCosts.shipping` property in your request payload. # noqa: E501
:return: The name of this AccountAdditionalCost. # noqa: E501
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this AccountAdditionalCost.
The additional cost name that will be used in API requests and Talang. E.g. if `name == \"shipping\"` then you would set the shipping additional cost by including an `additionalCosts.shipping` property in your request payload. # noqa: E501
:param name: The name of this AccountAdditionalCost. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and name is None: # noqa: E501
raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501
self._name = name
@property
def title(self):
"""Gets the title of this AccountAdditionalCost. # noqa: E501
The human-readable name for the additional cost that will be shown in the Campaign Manager. Like `name`, the combination of entity and title must also be unique. # noqa: E501
:return: The title of this AccountAdditionalCost. # noqa: E501
:rtype: str
"""
return self._title
@title.setter
def title(self, title):
"""Sets the title of this AccountAdditionalCost.
The human-readable name for the additional cost that will be shown in the Campaign Manager. Like `name`, the combination of entity and title must also be unique. # noqa: E501
:param title: The title of this AccountAdditionalCost. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and title is None: # noqa: E501
raise ValueError("Invalid value for `title`, must not be `None`") # noqa: E501
self._title = title
    @property
    def description(self):
        """Gets the description of this AccountAdditionalCost.  # noqa: E501

        A description of this additional cost.  # noqa: E501

        :return: The description of this AccountAdditionalCost.  # noqa: E501
        :rtype: str
        """
        return self._description

    @description.setter
    def description(self, description):
        """Sets the description of this AccountAdditionalCost.

        A description of this additional cost.  # noqa: E501

        :param description: The description of this AccountAdditionalCost.  # noqa: E501
        :type: str
        """
        # Generated client-side validation: `description` is a required field.
        if self.local_vars_configuration.client_side_validation and description is None:  # noqa: E501
            raise ValueError("Invalid value for `description`, must not be `None`")  # noqa: E501

        self._description = description
    @property
    def subscribed_applications_ids(self):
        """Gets the subscribed_applications_ids of this AccountAdditionalCost.  # noqa: E501

        A list of the IDs of the applications that are subscribed to this additional cost  # noqa: E501

        :return: The subscribed_applications_ids of this AccountAdditionalCost.  # noqa: E501
        :rtype: list[int]
        """
        return self._subscribed_applications_ids

    @subscribed_applications_ids.setter
    def subscribed_applications_ids(self, subscribed_applications_ids):
        """Sets the subscribed_applications_ids of this AccountAdditionalCost.

        A list of the IDs of the applications that are subscribed to this additional cost  # noqa: E501

        :param subscribed_applications_ids: The subscribed_applications_ids of this AccountAdditionalCost.  # noqa: E501
        :type: list[int]
        """
        # Optional field: no client-side validation is generated for it.
        self._subscribed_applications_ids = subscribed_applications_ids
    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        # Walk the generated attribute map, recursively converting any nested
        # models (objects exposing to_dict) found in lists, dicts or directly
        # as attribute values.
        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value

        return result
    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, AccountAdditionalCost):
            return False
        # Equality is structural: compare the fully converted dicts.
        return self.to_dict() == other.to_dict()

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        if not isinstance(other, AccountAdditionalCost):
            return True
        return self.to_dict() != other.to_dict()
|
sait-berkeley-infosec/pynessus-api
|
nessusapi/utils.py
|
# coding=utf-8
import inspect
def multiton(cls):
"""
Class decorator to make a class a multiton.
That is, there will be only (at most) one object existing for a given set
of initialization parameters.
"""
instances = {}
def getinstance(*args, **kwargs):
key = _gen_key(cls, *args, **kwargs)
if key not in instances:
instances[key] = cls(*args, **kwargs)
return instances[key]
return getinstance
kwd_mark = (object(), ) # seperate args and kwargs with a unique object
def _gen_key(cls, *args, **kwargs):
new_args, new_kwargs = _normalize_args(cls.__init__, *args, **kwargs)
key = new_args
if new_kwargs:
key += kwd_mark
sorted_items = sorted(new_kwargs.items())
for item in sorted_items:
key += item
return tuple(key)
def _normalize_args(func, *args, **kwargs):
try:
arg_names, _, _, arg_defaults = inspect.getargspec(func)
except AttributeError: # cls has no __init__
arg_names = ['self']
arg_defaults = ()
arg_names = arg_names[1:] # skip first arg (self)
if arg_defaults is None:
arg_defaults = ()
new_args = []
new_kwargs = {}
# match named args to names
for name, arg in zip(arg_names, args):
new_kwargs[name] = arg
# handle extra args from *
if len(args) > len(arg_names):
for arg in args[len(arg_names):]:
new_args.append(arg)
# or fill in default values
else:
for name, default in zip(arg_names[len(args):], arg_defaults):
new_kwargs[name] = default
# merge remaining **kwargs
new_kwargs.update(kwargs)
return new_args, new_kwargs
|
onqtam/doctest
|
scripts/update_stuff.py
|
#!/usr/bin/python2.7
import os
import fileinput
# the version of the release
with open("version.txt") as f: version = f.read()
def getVersionTuple(v):
return tuple(map(int, (v.split("."))))
version_major = str(getVersionTuple(version)[0])
version_minor = str(getVersionTuple(version)[1])
version_patch = str(getVersionTuple(version)[2])
# update version in the header file
print("updating the version in the header file")
doctest_contents = ""
for line in fileinput.input(["../doctest/parts/doctest_fwd.h"]):
if line.startswith("#define DOCTEST_VERSION_MAJOR "):
doctest_contents += "#define DOCTEST_VERSION_MAJOR " + version_major + "\n"
elif line.startswith("#define DOCTEST_VERSION_MINOR "):
doctest_contents += "#define DOCTEST_VERSION_MINOR " + version_minor + "\n"
elif line.startswith("#define DOCTEST_VERSION_PATCH "):
doctest_contents += "#define DOCTEST_VERSION_PATCH " + version_patch + "\n"
else:
doctest_contents += line
readme = open("../doctest/parts/doctest_fwd.h", "w")
readme.write(doctest_contents)
readme.close()
# update meson file with version
meson_contents = ""
for line in fileinput.input(["../meson.build"]):
if line.startswith("project('doctest'"):
meson_contents += "project('doctest', ['cpp'], version: '" + version + "', meson_version:'>=0.50')\n"
else:
meson_contents += line
meson = open("../meson.build", "w")
meson.write(meson_contents)
meson.close()
# run generate_html.py
print("generating html documentation from markdown")
os.system("python generate_html.py")
|
martinkozak/pyircgate-daemon
|
modules/ircgate/ircgate.py
|
#! /usr/bin/python
# -*- coding: utf-8 -*-
#
from application import module
from application import agent as _agent
from libraries import irclib
import threading
import time
import sys
import Queue
class Module(module.AbstractModule):
    """IRC gateway module: relays bus messages to configured IRC channels."""

    thread = None
    channels = None
    targets = None
    __password = None

    def __init__(self, application):
        super(Module, self).__init__(application)
        self.channels = {}
        self.targets = {}

    def initialize(self):
        """Start the IRC thread, expose the RPC hook and load target config."""
        self.__password = self.configuration.get("interaction", "password", "")

        self.thread = _IRCThread(self)
        self.thread.start()

        # Register a global 'ircgate' function with every RPC-capable agent.
        for agent in self.application.agents.values():
            if not isinstance(agent, _agent.AbstractRPCAgent):
                continue

            def callback(source, target, message, password=None):
                return self.application.api.message(source, Message(target, message, password))
            agent.register_function(callback, "ircgate")

        # Load 'target.<name>' sections; items starting with '#' are IRC
        # channels that should be joined (and start out enabled).
        for section in self.configuration.sections():
            if not section.startswith("target."):
                continue
            target = section[len("target."):]
            target_channels = []
            for key, item in self.configuration.items(section):
                if item[0] == "#":
                    self.channels[item] = True
                target_channels.append(item)
            self.targets[target] = target_channels

    def uninitialize(self):
        """Ask the IRC thread to disconnect and wait up to 10s for it."""
        self.thread.disconnect_event.set()
        self.thread.done_event.wait(10)

    def handle(self, message):
        """Queue bus messages that carry a Message payload (password permitting)."""
        if isinstance(message.content, Message) and self.is_password_done(message.content.password):
            self.thread.queue.put(message.content, True, 10)

    def is_password_done(self, password):
        """Return True when no password is configured or the given one matches."""
        return not self.__password or password == self.__password
class Message:
    """Payload relayed to IRC: a target name, text content and an optional password."""

    target = None
    content = None
    password = None

    def __init__(self, target, content, password=None):
        self.password = password
        self.content = content
        self.target = target
class _IRCThread(threading.Thread):
    """Background thread owning the IRC connection.

    Messages queued by the Module are relayed to the channels configured for
    their target; simple on/off/status channel commands and shutdown/reload
    private commands are accepted from IRC.
    """

    queue = None
    parent = None
    connection = None
    irc = None

    def __init__(self, parent):
        threading.Thread.__init__(self)
        self.setDaemon(True)
        self.parent = parent

        # BUG FIX: these used to be class attributes, so a fresh thread
        # created after a reload shared the already-set() events of the
        # previous one and stopped immediately. Make them per-instance.
        self.disconnect_event = threading.Event()
        self.done_event = threading.Event()

        queue_size = self.parent.application.configuration.getint("queue", "size", 100000)
        self.queue = Queue.Queue(queue_size)

    def run(self):
        """Main loop: pump IRC events and flush the outgoing message queue."""
        pooling_interval = self.parent.application.pooling_interval

        self.irc = irclib.IRC()
        self.connect()
        self.connection.add_global_handler("welcome", self.on_connect)
        self.connection.add_global_handler("disconnect", self.on_disconnect)
        self.connection.add_global_handler("privmsg", self.on_privmsg)
        self.connection.add_global_handler("pubmsg", self.on_pubmsg)

        while not self.disconnect_event.is_set():
            self.irc.process_once()

            while not self.queue.empty():
                try:
                    message = self.queue.get(False)
                    self.send_targets(message.target, message.content)
                except Queue.Empty:
                    break

            time.sleep(pooling_interval)

        if self.parent.application.reloading:
            quit_message = self.parent.configuration.get("interaction", "reloading_message", "Reloading.")
        else:
            quit_message = None

        self.disconnect(quit_message)

    def send_targets(self, targets, message):
        """Send message to every enabled channel of every listed target."""
        if isinstance(targets, str):
            targets = [targets]

        for target in targets:
            if target in self.parent.targets:
                items = self.parent.targets[target]
                for item in items:
                    # Relay unless the channel was switched off by the 'off' command.
                    if ((item in self.parent.channels) and self.parent.channels[item]) or not (item in self.parent.channels):
                        self.connection.privmsg(item, message)

    def join(self, channels):
        """Join one channel name or a list of channel names.

        NOTE(review): this shadows threading.Thread.join(); callers must use
        done_event (as Module.uninitialize does) to wait for termination.
        """
        if isinstance(channels, str):
            channels = [channels]
        for channel in channels:
            self.connection.join(channel)

    def disconnect(self, message = None):
        """Quit IRC (with an optional message) and signal completion."""
        if message is None:
            quit_message = self.parent.configuration.get("interaction", "quit_message", "Bye!")
        else:
            quit_message = message

        try:
            self.connection.quit(quit_message)
            self.connection.disconnect()
        finally:
            # Always wake up uninitialize(), even if quitting failed.
            self.done_event.set()

    def connect(self):
        """Connect to the configured server, retrying every 20s until it works."""
        configuration = self.parent.configuration
        server = configuration.get("connection", "server", "irc.freenode.net")
        port = configuration.getint("connection", "port", 6667)
        username = configuration.get("connection", "username", "ircbot")
        # NOTE(review): 'password' is read but never passed to connect().
        password = configuration.get("connection", "password", "")
        ipv6 = configuration.getboolean("connection", "ipv6", False)
        ssl = configuration.getboolean("connection", "ssl", False)

        while True:
            try:
                self.connection = self.irc.server().connect(server, port, username, ipv6 = ipv6, ssl = ssl)
                break
            except irclib.ServerConnectionError:
                sys.stderr.write("Connection failed. Sleeping 20 seconds and then reconnect.\n")
                time.sleep(20)

    def on_connect(self, connection, event):
        self.join(self.parent.channels)

    def on_disconnect(self, connection, event):
        # Reconnect unless we are shutting down on purpose.
        if not self.disconnect_event.is_set():
            self.connect()

    def on_pubmsg(self, connection, event):
        """Handle '<botnick>: on|off|status' channel commands addressed to the bot."""
        message = event.arguments()[0].strip().split(":", 1)
        target_user = message[0].strip()

        if target_user == connection.get_nickname():
            message = message[1].strip()
        else:
            return

        message = message.split(" ", 1)
        source = event.source().split("!", 1)[0].strip()
        target = event.target()

        try:
            if message[0] == "off":
                self.parent.channels[target] = False
                self.connection.privmsg(target, "%s: OK, off" % (source))
            elif message[0] == "on":
                self.parent.channels[target] = True
                self.connection.privmsg(target, "%s: OK, on" % (source))
            elif message[0] == "status":
                if self.parent.channels[target]:
                    status = "on"
                else:
                    status = "off"
                self.connection.privmsg(target, "%s: %s" % (source, status))
        except:
            # BUG FIX: sys.stderr.write() takes a single string; the previous
            # two-argument call raised a TypeError of its own.
            sys.stderr.write("%s: FAILED, exception: %s\n" % (source, str(sys.exc_value)))

    def on_privmsg(self, connection, event):
        """Handle private 'shutdown [pw]' / 'reload [pw]' commands."""
        if event.target() == connection.get_nickname():
            message = event.arguments()[0].strip().split(" ", 1)
            source = event.source().split("!", 1)[0].strip()

            try:
                if message[0] == "shutdown":
                    password = None
                    if len(message) >= 2:
                        password = message[1].strip()

                    module = self.parent.application.loads.modules["system"]
                    bus_message = module.Message(module.Message.SHUTDOWN, password)
                    self.parent.application.api.message(self.parent, bus_message)
                    self.connection.privmsg(source, "OK, i'm going to shutdown")
                elif message[0] == "reload":
                    password = None
                    if len(message) >= 2:
                        password = message[1].strip()

                    module = self.parent.application.loads.modules["system"]
                    bus_message = module.Message(module.Message.RELOAD, password)
                    self.parent.application.api.message(self.parent, bus_message)
                    self.connection.privmsg(source, "OK, i'm going to reload")
            except:
                self.connection.privmsg(source, "FAILED, exception: " + str(sys.exc_value))
###
def factory(application):
    """Construct the IRC gate Module for *application* (module entry point)."""
    return Module(application)
|
lamperi/aoc
|
2016/10/solve.py
|
# Advent of Code 2016 day 10: bots hand their low/high chip to other bots
# or to output bins according to the instructions in input.txt.
with open("input.txt") as f:
    data = f.read()

bots = 1000
# inputs[b][0] is a "has already acted" flag; the chip values follow it.
inputs = [[False] for i in range(bots)]
outputs = [[] for i in range(bots)]
# maps[b] holds [(low_type, low_dest), (high_type, high_dest)] for bot b.
maps = [[] for i in range(bots)]

for line in data.splitlines():
    if line.startswith("value"):
        # "value V goes to bot B"
        p = line.split()
        val = int(p[1])
        bot = int(p[-1])
        inputs[bot].append(val)
    elif line.startswith("bot"):
        # "bot B gives low to <type> X and high to <type> Y"
        p = line.split()
        fbot = int(p[1])
        lowtype = p[5]
        tolow = int(p[6])
        hightype = p[-2]
        tohigh = int(p[-1])
        maps[fbot].append((lowtype, tolow))
        maps[fbot].append((hightype, tohigh))

while True:
    found = False
    for bot_id, input_set in enumerate(inputs):
        # A bot acts once it holds exactly two chips and has not acted yet.
        if not input_set[0] and len(input_set) == 3:
            found = True
            input_set[0] = True
            low,high = sorted(input_set[1:])
            if low == 17 and high == 61:
                print("PART 1: {}".format(bot_id))
            low_target,high_target = maps[bot_id]
            if low_target[0] == "bot":
                inputs[low_target[1]].append(low)
            else:
                outputs[low_target[1]].append(low)
            if high_target[0] == "bot":
                inputs[high_target[1]].append(high)
            else:
                outputs[high_target[1]].append(high)
            #print("Bot {} gives {} to {} and {} to {}".format(bot_id, low, low_target, high, high_target))
    # Stop when a full pass makes no progress (all bots have acted).
    if not found:
        break

res = outputs[0][0] * outputs[1][0] * outputs[2][0]
print("PART 2: {}".format(res))
|
zrzka/blackmamba
|
blackmamba/lib/rope/base/utils/__init__.py
|
import functools
import sys
import warnings
def saveit(func):
    """A decorator that caches the return value of a function.

    The value is computed once (on the first call, whatever the arguments)
    and stored on the instance as ``_<funcname>``; later calls return the
    stored value without re-invoking ``func``.
    """
    name = '_' + func.__name__

    # functools.wraps preserves __name__/__doc__ of the decorated function,
    # which the bare inner wrapper previously discarded.
    @functools.wraps(func)
    def _wrapper(self, *args, **kwds):
        if not hasattr(self, name):
            setattr(self, name, func(self, *args, **kwds))
        return getattr(self, name)
    return _wrapper


cacheit = saveit
def prevent_recursion(default):
    """A decorator that returns the return value of `default` in recursions"""
    def decorator(func):
        # Per-function flag attribute marking "currently executing on self".
        flag = '_calling_%s_' % func.__name__

        def guarded(self, *args, **kwds):
            if getattr(self, flag, False):
                # Re-entered while already running: short-circuit.
                return default()
            setattr(self, flag, True)
            try:
                return func(self, *args, **kwds)
            finally:
                setattr(self, flag, False)
        return guarded
    return decorator
def ignore_exception(exception_class):
    """A decorator that ignores `exception_class` exceptions"""
    def _decorator(func):
        def _safe_call(*args, **kwds):
            try:
                return func(*args, **kwds)
            except exception_class:
                # Swallow the exception; the wrapper then returns None.
                return None
        return _safe_call
    return _decorator
def deprecated(message=None):
    """A decorator for deprecated functions"""
    # `message=message` re-binds the outer value so it can be defaulted
    # from the function name at decoration time.
    def _decorator(func, message=message):
        if message is None:
            message = '%s is deprecated' % func.__name__

        def _warn_and_call(*args, **kwds):
            warnings.warn(message, DeprecationWarning, stacklevel=2)
            return func(*args, **kwds)
        return _warn_and_call
    return _decorator
def cached(size):
    """A caching decorator based on parameter objects"""
    def decorator(func):
        cached_func = _Cached(func, size)
        return lambda *a, **kw: cached_func(*a, **kw)
    return decorator


class _Cached(object):
    """FIFO-bounded cache of (args, kwargs) -> result pairs.

    Keys are compared by equality (a linear scan), so arguments do not
    need to be hashable.
    """

    def __init__(self, func, count):
        self.func = func
        self.cache = []
        self.count = count

    def __call__(self, *args, **kwds):
        key = (args, kwds)
        for entry in self.cache:
            if entry[0] == key:
                return entry[1]
        result = self.func(*args, **kwds)
        self.cache.append((key, result))
        if len(self.cache) > self.count:
            # Forget the oldest entry to respect the size bound.
            del self.cache[0]
        return result
def resolve(str_or_obj):
    """Returns object from string"""
    from rope.base.utils.pycompat import string_types
    if not isinstance(str_or_obj, string_types):
        # Already an object; nothing to resolve.
        return str_or_obj
    path = str_or_obj
    if '.' not in path:
        # Bare module name: force an empty attribute part below.
        path += '.'
    mod_name, obj_name = path.rsplit('.', 1)
    __import__(mod_name)
    mod = sys.modules[mod_name]
    if obj_name:
        return getattr(mod, obj_name)
    return mod
|
hexhex/hexlite
|
java-api/src/test/python/test-jpype.py
|
import sys, logging
logging.basicConfig(level=15, stream=sys.stderr, format="%(levelname)1s:%(filename)10s:%(lineno)3d:%(message)s")
# make log level names shorter so that we can show them
logging.addLevelName(50, 'C')
logging.addLevelName(40, 'E')
logging.addLevelName(30, 'W')
logging.addLevelName(20, 'I')
logging.addLevelName(10, 'D')
logging.info("test1")
import jpype
# Start the JVM; convertStrings=False keeps java.lang.String objects
# as Java proxies instead of auto-converting them to Python str.
jpype.startJVM(convertStrings=False)
# Java interfaces from the hexlite API implemented by the classes below.
IPluginAtom = jpype.JClass("at.ac.tuwien.kr.hexlite.api.IPluginAtom")
ISolverContext = jpype.JClass("at.ac.tuwien.kr.hexlite.api.ISolverContext")
IAtom = jpype.JClass("at.ac.tuwien.kr.hexlite.api.IAtom")
ISymbol = jpype.JClass("at.ac.tuwien.kr.hexlite.api.ISymbol")
@jpype.JImplements(ISymbol)
class JavaSymbolImpl:
    """Python implementation of the ISymbol Java interface (a constant symbol)."""

    def __init__(self, what):
        # what: the constant's name as a Python string.
        self.what = what

    @jpype.JOverride
    def getType(self):
        return ISymbol.Type.CONSTANT

    @jpype.JOverride
    def getName(self):
        return self.what

    @jpype.JOverride
    def getInteger(self):
        # Dummy value: this test symbol is a constant, not an integer.
        return 4711

    @jpype.JOverride
    def getArguments(self):
        return []

    @jpype.JOverride
    def getTuple(self):
        return [self.getName()]

    @jpype.JOverride
    def hashCode(self):
        # Mask to 31 bits so the value fits a (signed) Java int.
        return jpype.JInt(hash(self.what) & 0x7FFFFFFF)

    @jpype.JOverride
    def equals(self, other):
        if self == other:
            return True
        elif self.what == other.what:
            return True
        else:
            return False
@jpype.JImplements(IPluginAtom.IQuery)
class JavaQueryImpl:
    """Minimal IQuery: fixed input terms ('foo', 'bar'), no interpretation."""

    def __init__(self):
        pass

    @jpype.JOverride
    def getInterpretation(self):
        # Not implemented for this smoke test.
        logging.warning("TBD")
        return None

    @jpype.JOverride
    def getInput(self):
        # Build a Java ArrayList holding two ISymbol implementations.
        ret = jpype.JClass("java.util.ArrayList")()
        ret.add(JavaSymbolImpl('foo'))
        ret.add(JavaSymbolImpl('bar'))
        return ret
@jpype.JImplements(ISolverContext)
class JavaSolverContextImpl:
    """Stub ISolverContext: operations mostly just log that they are unimplemented."""

    def __init__(self):
        pass

    @jpype.JOverride
    def storeOutputAtom(self, atom):
        logging.warning("TBD")
        return jpype.JObject(None, IAtom)

    @jpype.JOverride
    def storeAtom(self, atom):
        logging.warning("TBD")
        return None

    @jpype.JOverride
    def storeConstant(self, s):
        # Wrap the stored constant so it is recognizable in test output.
        logging.warning("TBD store %s", s)
        return JavaSymbolImpl('stored({})'.format(s))

    @jpype.JOverride
    def learn(self, nogood):
        logging.warning("TBD")
def jmain():
    """Exercise the Java StringPlugin: instantiate it, create its atoms and
    call retrieve() on the first one with the stub context/query above."""
    logging.info("test2")
    jls = jpype.JClass("java.lang.System")
    jls.out.println("i am printing java.class.path")
    print(jls.getProperty("java.class.path"))
    JStringPlugin = jpype.JClass("at.ac.tuwien.kr.hexlite.stringplugin.StringPlugin")
    logging.info("got JStringPlugin %s", JStringPlugin)
    splugin = JStringPlugin()
    logging.info("got splugin %s", splugin)
    jatoms = splugin.createAtoms()
    logging.info("got atoms %s", jatoms)
    # assumes the first atom returned is the concat atom -- TODO confirm
    jconcat = jatoms[0]
    jcontext = JavaSolverContextImpl()
    jquery = JavaQueryImpl()
    janswer = jconcat.retrieve(jcontext, jquery)
    logging.info("answer is %s", janswer)
    jpype.shutdownJVM()
    logging.info("done")
def main():
    """Run jmain() and log any Java exception with its stack trace."""
    try:
        jmain()
    except jpype.JClass("java.lang.Exception") as ex:
        logging.error("exception: %s", ex.toString())
        st = ex.getStackTrace()
        for ste in st:
            logging.error("\t at %s", ste.toString())
        #sb.append(ex.getClass().getName() + ": " + ex.getMessage() + "\n");

main()
|
afonsoduarte/ansible-stacey
|
roles/letsencrypt/templates/renew-certs.py
|
#!/usr/bin/env python
import os
import sys
import time
from subprocess import CalledProcessError, check_output, STDOUT
# NOTE: this is an Ansible/Jinja2 template of a Python 2 script; the
# double-brace placeholders are substituted before the file is executed.
certs_dir = '{{ letsencrypt_certs_dir }}'
failed = False

sites = {{ sites }}
# Keep only sites whose SSL is enabled and provided by letsencrypt.
sites = (k for k, v in sites.items() if 'ssl' in v and v['ssl'].get('enabled', False) and v['ssl'].get('provider', 'manual') == 'letsencrypt')

for site in sites:
    cert_path = os.path.join(certs_dir, site + '.cert')
    bundled_cert_path = os.path.join(certs_dir, site + '-bundled.cert')

    if os.access(cert_path, os.F_OK):
        stat = os.stat(cert_path)
        print 'Certificate file ' + cert_path + ' already exists'

        # Skip certificates younger than the configured minimum age (in days).
        if time.time() - stat.st_mtime < {{ letsencrypt_min_renewal_age }} * 86400:
            print ' The certificate is younger than {{ letsencrypt_min_renewal_age }} days. Not creating a new certificate.\n'
            continue

    print 'Generating certificate for ' + site

    # Invoke acme_tiny with the pre-generated CSR for this site.
    cmd = ('/usr/bin/env python {{ acme_tiny_software_directory }}/acme_tiny.py '
           '--ca {{ letsencrypt_ca }} '
           '--account-key {{ letsencrypt_account_key }} '
           '--csr {{ acme_tiny_data_directory }}/csrs/{0}.csr '
           '--acme-dir {{ acme_tiny_challenges_directory }}'
           ).format(site)
    try:
        cert = check_output(cmd, stderr=STDOUT, shell=True)
    except CalledProcessError as e:
        # Keep going so the remaining sites are still renewed.
        failed = True
        print 'Error while generating certificate for ' + site
        print e.output
    else:
        # Write the leaf certificate, then a bundle of leaf + intermediate.
        with open(cert_path, 'w') as cert_file:
            cert_file.write(cert)
        with open('{{ letsencrypt_intermediate_cert_path }}') as intermediate_cert_file:
            intermediate_cert = intermediate_cert_file.read()
        with open(bundled_cert_path, 'w') as bundled_file:
            bundled_file.write(''.join([cert, intermediate_cert]))
        print 'Created certificate for ' + site

if failed:
    sys.exit(1)
|
philipbjorge/WTA-Bus-Routing
|
WTA App.py
|
from graph_tool.all import *
from sets import Set
import random
from geopy import geocoders, distance
from decimal import *
def randomize(iterable, bufsize=1000):
    ''' generator that randomizes an iterable. space: O(bufsize). time: O(n+bufsize). '''
    # Reservoir-style shuffle: keep a fixed-size buffer of pending items,
    # yielding an evicted item whenever a slot collision happens.
    reservoir = [None] * bufsize
    for item in iterable:
        slot = random.randrange(bufsize)
        if reservoir[slot] is not None:
            yield reservoir[slot]
        reservoir[slot] = item
    # Flush whatever is still buffered (None marks an empty slot).
    for leftover in reservoir:
        if leftover is not None:
            yield leftover
g = Graph()
g = load_graph("finalNodes.dot", "dot")

# Vertex properties stored in the .dot file: stop name and a GPS string.
# assumes gpsP is an "x, y" pair of hex-float strings (see float.fromhex
# below) -- TODO confirm against the graph file.
vprop_name = g.vertex_properties["nameP"] #new_vertex_property("string")
vprop_gps = g.vertex_properties["gpsP"] #g.new_vertex_property("vector<float>")

# Edge properties filled in below while loading the bus schedule.
g.edge_properties["busP"] = g.new_edge_property("string")
g.edge_properties["departureP"] = g.new_edge_property("int")
g.edge_properties["travelTimeP"] = g.new_edge_property("int")
eprop_bus = g.edge_properties["busP"] #g.new_edge_property("string")
eprop_departure = g.edge_properties["departureP"] #g.new_edge_property("vector<int>")
eprop_travelTime = g.edge_properties["travelTimeP"] #g.new_edge_property("int") #"Weight"
## Todo: User interface
startAddress = raw_input('What is your starting destination? ')
endAddress = raw_input('What is your ending destination? ')
leaveTime = raw_input('What is your departure time (in 24-h format? ')
lt = leaveTime.split(':')
finalTime = int(lt[0]*60) + int(lt[1])
geo = geocoders.Google('YOUR_API_KEY')
sPlace, sGPS = geo.geocode(startAddress)
ePlace, eGPS = geo.geocode(endAddress)
closestS = -1
closestE = -1
shortestS = 1000.0
shortestE = 1000.0
for v in g.vertices():
coord = vprop_gps[v].split(', ')
sDist = distance.distance(sGPS, [float.fromhex(coord[0]), float.fromhex(coord[1])]).miles
eDist = distance.distance(eGPS, [float.fromhex(coord[0]), float.fromhex(coord[1])]).miles
if sDist < shortestS:
shortestS = sDist
closestS = v
if eDist < shortestE:
shortestE = eDist
closestE = v
print vprop_name[closestS]
print vprop_name[closestE]
START_TIME = finalTime
END_TIME = START_TIME+120
START_NODE = closestS
END_NODE = closestE
#TODO: Better way to add edges
# Specifically, read the file myself
import ast

# Load candidate bus edges whose departure falls inside the time window.
# 'with' closes the data file even if parsing fails (the old code leaked it).
with open("busEdgesData.txt", "r") as edge_file:
    d = edge_file.readlines()
for line in d:
    line = line.split(" ")
    vData = line[0].split("->")
    newSplit = line[1].split("\"")
    bus = newSplit[1] # Bus number
    times = newSplit[3]
    travelTime = newSplit[5]
    # ast.literal_eval only parses literals; eval() would execute arbitrary
    # expressions from the data file.
    times = ast.literal_eval('[' + times + ']')
    for t in times:
        if t >= START_TIME and t <= END_TIME:
            newE = g.add_edge(g.vertex(vData[0]), g.vertex(vData[1]))
            eprop_bus[newE] = bus
            eprop_departure[newE] = t
            eprop_travelTime[newE] = int(travelTime)
successRoutes = []
# Monte-Carlo route search: 150 simulated riders make random stay/transfer
# choices for up to 100 hops each; riders reaching END_NODE are recorded.
for rider in range(150):
    currentNode = g.vertex(START_NODE)
    currentBus = "-1"
    currentRoute = []
    # Route entries are [stop name, bus, departure time, travel time].
    currentRoute.append([vprop_name[currentNode], "", START_TIME, 0])
    print "Trial " + str(rider)
    for choice in range(100):
        if currentNode == g.vertex(END_NODE):
            currentRoute.append(vprop_name[currentNode])
            successRoutes.append(currentRoute)
            break
        else:
            coin = random.randint(0,1)
            if coin == 0 and currentBus != "-1":
                # continue on bus
                for e in currentNode.out_edges():
                    # pop+append peeks at the last route entry without removing it
                    lastTry = currentRoute.pop()
                    currentRoute.append(lastTry)
                    #print eprop_bus[e] + " vs " + currentBus
                    #print str(eprop_departure[e]) + " vs " + str(lastTry[2] + lastTry[3])
                    # Same bus departing exactly when we arrive: ride it onward.
                    if eprop_bus[e] == currentBus and eprop_departure[e] == (lastTry[2] + lastTry[3]):
                        currentNode = e.target()
                        currentRoute.append([vprop_name[currentNode], eprop_bus[e], eprop_departure[e], eprop_travelTime[e]])
                        break
            else:
                # try a different bus
                found = -1
                # Scan outgoing edges in random order; the last feasible one wins.
                for e in randomize(currentNode.out_edges(), 1000):
                    lastTry = currentRoute.pop()
                    currentRoute.append(lastTry)
                    if eprop_departure[e] >= (lastTry[2] + lastTry[3]):
                        newE = e
                        found = 1
                if found != -1:
                    currentBus = eprop_bus[newE]
                    currentNode = newE.target()
                    currentRoute.append([vprop_name[currentNode], eprop_bus[newE], eprop_departure[newE], eprop_travelTime[newE]])
    #print currentRoute
threeShortest = []
thirdShortest = 10000
# NOTE(review): despite the names, this keeps at most three routes but does
# not strictly select the three *shortest* ones -- confirm intent.
for r in successRoutes:
    # Peek at the final departure time (second-to-last entry) of the route.
    end = r.pop()
    next = r.pop()
    length = int(next[2])
    r.append(next)
    r.append(end)
    if length < thirdShortest:
        thirdShortest = length
        if len(threeShortest) == 3:
            threeShortest.pop()
        threeShortest.append(r)
    else:
        threeShortest.append(r)

for r in threeShortest:
    print "Attempt:"
    print r

# NOTE(review): denominator is hard-coded to 100 although 150 riders run.
print str(len(successRoutes)) + "/" + str(100)
|
luftdanmark/fifo.li
|
fifo/users/models.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
from django.contrib.auth.models import AbstractUser
from django.core.urlresolvers import reverse
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
import uuid
@python_2_unicode_compatible
class User(AbstractUser):
    """Custom user model with a UUID primary key and organization linkage."""

    # UUID primary key instead of Django's default auto-increment integer.
    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    # -1 appears to mean "no organization assigned yet" -- TODO confirm
    org_id = models.IntegerField(_('Organization ID'), default=-1, blank=False)
    name = models.CharField(_('Name of User'), blank=False, max_length=255)
    phone = models.CharField(_('Phone Number'), max_length=10, blank=True)

    def __str__(self):
        return self.username

    def get_absolute_url(self):
        return reverse('users:detail', kwargs={'username': self.username})
|
luwei0917/awsemmd_script
|
pulling.py
|
#!/usr/bin/env python3
import os
import argparse
import sys
from time import sleep
import subprocess
import imp
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib
import datetime
import pickle
matplotlib.style.use('fivethirtyeight')
# print(plt.style.available)

# mypath = os.environ["PATH"]
# os.environ["PATH"] = "/home/wl45/python/bin:/home/wl45/opt:" + mypath
# my_env = os.environ.copy()

parser = argparse.ArgumentParser(
    description="Plot my graphs quickly")
parser.add_argument("-s", "--save", action="store_true", default=False)
parser.add_argument("-r", "--reproduce", default=None)
parser.add_argument("-t", "--test", action="store_true", default=False)
parser.add_argument("-m", "--mode", default="pulling")
parser.add_argument("-d", "--debug", action="store_true", default=False)
args = parser.parse_args()

# In debug mode, print the would-be shell commands instead of executing them.
if args.debug:
    do = print
    cd = print
else:
    do = os.system
    cd = os.chdir

if args.reproduce is not None:
    # Re-load a previously pickled argparse namespace to repeat a run.
    print("Reproducing!")
    with open(args.reproduce, "rb") as f:
        args = pickle.load(f)
        print(args)
        args.save = False

if args.test:
    print("Hello Test World")

# Render a contour plot for each pulling-force folder and collect the images.
force_list = np.arange(1, 2.5, 0.1)
cwd = os.getcwd()
print(cwd)
for force in force_list:
    folder = "2d_2_force_" + str(force)
    cd(folder)
    do("pulling_plotcontour.py")
    do("cp test.png ../results/{}.png".format(force))
    cd(cwd)

if args.save:
    # Persist the argparse namespace (and this script) for later reproduction.
    print("Saving")
    with open("args" + datetime.datetime.now().strftime("%Y%m%d-%H%M"), "wb") as f:
        pickle.dump(args, f)
    os.system("cp ~/opt/plot.py plot_{}.py".format(datetime.datetime.now().strftime("%Y%m%d-%H%M")))

# BUG FIX: argparse stores --mode as a string ("pulling" by default), so the
# old comparison `args.mode == 1` could never be true and this branch was dead.
if args.mode == "1":
    do("pulling_prepare.py --data -m 6")
    do("pulling_glue.py --move -m 4")
    folder = "wham"
    do("mkdir " + folder)
    cd(folder)
    do("make_metadata.py -k 200 -a 2 -t")
    do("pulling_analysis.py -f -m 8")
    do("sbatch freeEnergy.slurm")
sridhar912/Self-Driving-Car-NanoDegree
|
CarND-Advanced-Lane-Lines/CameraCalibration.py
|
import numpy as np
import cv2
import pickle
import os
import matplotlib.pyplot as plt
class cameraCalib():
    """Camera calibration helper.

    Computes chessboard-based calibration parameters on first use and caches
    them as a pickle next to the calibration images; later instantiations
    load the cached parameters instead of recomputing.
    """

    def __init__(self, calib_image_path = 'camera_cal/'):
        self.mtx = None   # camera matrix
        self.dist = None  # distortion coefficients
        self.calib_image_path = calib_image_path
        self.calib_file = self.calib_image_path + 'camera_param.pickle'
        if not os.path.isfile(self.calib_file):
            # First run: calibrate from the images and cache the result.
            calib_param = self.calc_calibration_parameters(self.calib_image_path, 6, 9, display_corners=False)
            self.mtx = calib_param['mtx']
            self.dist = calib_param['dist']
            with open(self.calib_file, 'wb') as f:
                pickle.dump(calib_param, file=f)
        else:
            with open(self.calib_file, "rb") as f:
                calib_param = pickle.load(f)
                self.mtx = calib_param['mtx']
                self.dist = calib_param['dist']
                print('Loaded calibration parameters from disk....')

    def calc_calibration_parameters(self, calib_im_path, rows, cols, display_corners=False):
        """Compute calibration parameters from a set of calibration images.
        Params:
            calib_im_path: Directory of calibration images.
            rows: checkerboard row number
            cols: checkerboard col number
            display_corners: when True, show each detected corner grid.
        Return:
            calib_param = {'mtx': camera matrix, 'dist': distortion coefficients}
        """
        # Object / image points collections.
        objpoints = []
        imgpoints = []
        # Calibration points from images.
        filenames = os.listdir(calib_im_path)
        for fname in filenames:
            img = cv2.imread(calib_im_path + fname)
            gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
            # Theoretical Grid.
            objp = np.zeros((rows * cols, 3), np.float32)
            objp[:, :2] = np.mgrid[0:cols, 0:rows].T.reshape(-1, 2)
            # Corners in the image.
            ret, corners = cv2.findChessboardCorners(gray, (cols, rows), None)
            if ret:
                objpoints.append(objp)
                imgpoints.append(corners)
                if display_corners:
                    img = cv2.drawChessboardCorners(img, (cols, rows), corners, ret)
                    plt.imshow(img)
                    plt.show()
            else:
                print('Warning! Not chessboard found in image', fname)
        # Calibration from image points.
        # NOTE(review): 'img' is whatever image was read last, and
        # img.shape[0:2] is (height, width) while cv2.calibrateCamera
        # expects an imageSize of (width, height) -- verify this is intended.
        ret, mtx, dist, rvecs, tvecs = cv2.calibrateCamera(objpoints,
                                                           imgpoints,
                                                           img.shape[0:2],
                                                           None, None)
        calib_param = {'mtx': mtx,
                       'dist': dist}
        return calib_param

    def get_camera_parameters(self):
        """Return (camera matrix, distortion coefficients)."""
        return self.mtx, self.dist

    def undistort_image(self, img):
        """Undistort an image.
        """
        return cv2.undistort(img, self.mtx, self.dist, None, self.mtx)

    def show_undistorted_images(self, org, undist):
        """
        Show original and undistorted images
        :param org: The original image
        :param undist: The undistorted image
        :return:
        """
        orgr = cv2.cvtColor(org, cv2.COLOR_BGR2RGB)
        undistr = cv2.cvtColor(undist, cv2.COLOR_BGR2RGB)
        f, (ax1, ax2) = plt.subplots(1, 2, figsize=(20, 10))
        ax1.imshow(orgr)
        ax1.set_title('Original Image', fontsize=30)
        ax2.imshow(undistr)
        ax2.set_title('Undistorted Image', fontsize=30)
        plt.show()

    def test_calibration(self, fname):
        """Test calibration on an example chessboard, and display the result.
        """
        # Load image, draw chessboard and undistort.
        img = cv2.imread(fname)
        img_undist = self.undistort_image(img)
        self.show_undistorted_images(img, img_undist)
|
getodacu/eSENS-eDocument
|
profiles/e_confirmation/xb_request/_sac.py
|
# ./_sac.py
# -*- coding: utf-8 -*-
# PyXB bindings for NM:bd794131cb7c2b1e52ff4e6220a49c5d8509c55c
# Generated 2015-02-11 21:35:49.975586 by PyXB version 1.2.4 using Python 2.6.9.final.0
# Namespace urn:oasis:names:specification:ubl:schema:xsd:SignatureAggregateComponents-2 [xmlns:sac]
from __future__ import unicode_literals
import pyxb
import pyxb.binding
import pyxb.binding.saxer
import io
import pyxb.utils.utility
import pyxb.utils.domutils
import sys
import pyxb.utils.six as _six
# Unique identifier for bindings created at the same time
# (This module is PyXB-generated; avoid hand edits -- regeneration clobbers them.)
_GenerationUID = pyxb.utils.utility.UniqueIdentifier('urn:uuid:2b2e2fd1-b225-11e4-b26c-14109fe53921')

# Version of PyXB used to generate the bindings
_PyXBVersion = '1.2.4'
# Generated bindings are not compatible across PyXB versions
if pyxb.__version__ != _PyXBVersion:
    raise pyxb.PyXBVersionError(_PyXBVersion)

# Import bindings for namespaces imported into schema
import pyxb.binding.datatypes
import _ds as _ImportedBinding__ds
import _sbc as _ImportedBinding__sbc
import _cbc as _ImportedBinding__cbc

# NOTE: All namespace declarations are reserved within the binding
Namespace = pyxb.namespace.NamespaceForURI('urn:oasis:names:specification:ubl:schema:xsd:SignatureAggregateComponents-2', create_if_missing=True)
Namespace.configureCategories(['typeBinding', 'elementBinding'])
# Namespaces of the imported ds/cbc/sbc bindings, prepared for registration.
_Namespace_ds = _ImportedBinding__ds.Namespace
_Namespace_ds.configureCategories(['typeBinding', 'elementBinding'])
_Namespace_cbc = _ImportedBinding__cbc.Namespace
_Namespace_cbc.configureCategories(['typeBinding', 'elementBinding'])
_Namespace_sbc = _ImportedBinding__sbc.Namespace
_Namespace_sbc.configureCategories(['typeBinding', 'elementBinding'])
def CreateFromDocument (xml_text, default_namespace=None, location_base=None):
    """Parse the given XML and use the document element to create a
    Python instance.

    @param xml_text An XML document. This should be data (Python 2
    str or Python 3 bytes), or a text (Python 2 unicode or Python 3
    str) in the L{pyxb._InputEncoding} encoding.

    @keyword default_namespace The L{pyxb.Namespace} instance to use as the
    default namespace where there is no default namespace in scope.
    If unspecified or C{None}, the namespace of the module containing
    this function will be used.

    @keyword location_base: An object to be recorded as the base of all
    L{pyxb.utils.utility.Location} instances associated with events and
    objects handled by the parser. You might pass the URI from which
    the document was obtained.
    """
    # Fall back to DOM-based parsing when the runtime is not configured
    # to use the SAX parser style.
    if pyxb.XMLStyle_saxer != pyxb._XMLStyle:
        dom = pyxb.utils.domutils.StringToDOM(xml_text)
        return CreateFromDOM(dom.documentElement, default_namespace=default_namespace)
    if default_namespace is None:
        default_namespace = Namespace.fallbackNamespace()
    saxer = pyxb.binding.saxer.make_parser(fallback_namespace=default_namespace, location_base=location_base)
    handler = saxer.getContentHandler()
    # The SAX parser consumes bytes; encode text input before parsing.
    xmld = xml_text
    if isinstance(xmld, _six.text_type):
        xmld = xmld.encode(pyxb._InputEncoding)
    saxer.parse(io.BytesIO(xmld))
    instance = handler.rootObject()
    return instance
def CreateFromDOM (node, default_namespace=None):
    """Create a Python instance from the given DOM node.

    The node tag must correspond to an element declaration in this module.

    @deprecated: Forcing use of DOM interface is unnecessary; use L{CreateFromDocument}."""
    if default_namespace is None:
        default_namespace = Namespace.fallbackNamespace()
    return pyxb.binding.basis.element.AnyCreateFromDOM(node, default_namespace)
# Complex type {urn:oasis:names:specification:ubl:schema:xsd:SignatureAggregateComponents-2}SignatureInformationType with content type ELEMENT_ONLY
class SignatureInformationType (pyxb.binding.basis.complexTypeDefinition):
    """Binding for the UBL SignatureInformationType complex type.

    Child elements (see the content automaton built below): cbc:ID,
    sbc:ReferencedSignatureID and ds:Signature.

    PyXB-generated code; do not edit by hand.
    """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'SignatureInformationType')
    _XSDLocation = pyxb.utils.utility.Location('/Users/radu/Projects/esens/edocument/profiles/e_confirmation/xsd/common/UBL-SignatureAggregateComponents-2.1.xsd', 48, 3)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://www.w3.org/2000/09/xmldsig#}Signature uses Python identifier Signature
    __Signature = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(_Namespace_ds, 'Signature'), 'Signature', '__urnoasisnamesspecificationublschemaxsdSignatureAggregateComponents_2_SignatureInformationType_httpwww_w3_org200009xmldsigSignature', False, pyxb.utils.utility.Location('/Users/radu/Projects/esens/edocument/profiles/e_confirmation/xsd/common/UBL-xmldsig-core-schema-2.1.xsd', 54, 0), )
    Signature = property(__Signature.value, __Signature.set, None, None)
    # Element {urn:oasis:names:specification:ubl:schema:xsd:CommonBasicComponents-2}ID uses Python identifier ID
    __ID = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(_Namespace_cbc, 'ID'), 'ID', '__urnoasisnamesspecificationublschemaxsdSignatureAggregateComponents_2_SignatureInformationType_urnoasisnamesspecificationublschemaxsdCommonBasicComponents_2ID', False, pyxb.utils.utility.Location('/Users/radu/Projects/esens/edocument/profiles/e_confirmation/xsd/common/UBL-CommonBasicComponents-2.1.xsd', 341, 3), )
    ID = property(__ID.value, __ID.set, None, None)
    # Element {urn:oasis:names:specification:ubl:schema:xsd:SignatureBasicComponents-2}ReferencedSignatureID uses Python identifier ReferencedSignatureID
    __ReferencedSignatureID = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(_Namespace_sbc, 'ReferencedSignatureID'), 'ReferencedSignatureID', '__urnoasisnamesspecificationublschemaxsdSignatureAggregateComponents_2_SignatureInformationType_urnoasisnamesspecificationublschemaxsdSignatureBasicComponents_2ReferencedSignatureID', False, pyxb.utils.utility.Location('/Users/radu/Projects/esens/edocument/profiles/e_confirmation/xsd/common/UBL-SignatureBasicComponents-2.1.xsd', 28, 3), )
    ReferencedSignatureID = property(__ReferencedSignatureID.value, __ReferencedSignatureID.set, None, None)
    _ElementMap.update({
        __Signature.name() : __Signature,
        __ID.name() : __ID,
        __ReferencedSignatureID.name() : __ReferencedSignatureID
    })
    _AttributeMap.update({

    })
Namespace.addCategoryObject('typeBinding', 'SignatureInformationType', SignatureInformationType)
# Top-level element declaration for SignatureInformation.
SignatureInformation = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'SignatureInformation'), SignatureInformationType, location=pyxb.utils.utility.Location('/Users/radu/Projects/esens/edocument/profiles/e_confirmation/xsd/common/UBL-SignatureAggregateComponents-2.1.xsd', 44, 3))
Namespace.addCategoryObject('elementBinding', SignatureInformation.name().localName(), SignatureInformation)
# Register the three child element declarations on the complex type.
SignatureInformationType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(_Namespace_ds, 'Signature'), _ImportedBinding__ds.SignatureType, scope=SignatureInformationType, location=pyxb.utils.utility.Location('/Users/radu/Projects/esens/edocument/profiles/e_confirmation/xsd/common/UBL-xmldsig-core-schema-2.1.xsd', 54, 0)))
SignatureInformationType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(_Namespace_cbc, 'ID'), _ImportedBinding__cbc.IDType, scope=SignatureInformationType, location=pyxb.utils.utility.Location('/Users/radu/Projects/esens/edocument/profiles/e_confirmation/xsd/common/UBL-CommonBasicComponents-2.1.xsd', 341, 3)))
SignatureInformationType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(_Namespace_sbc, 'ReferencedSignatureID'), _ImportedBinding__sbc.ReferencedSignatureIDType, scope=SignatureInformationType, location=pyxb.utils.utility.Location('/Users/radu/Projects/esens/edocument/profiles/e_confirmation/xsd/common/UBL-SignatureBasicComponents-2.1.xsd', 28, 3)))
def _BuildAutomaton ():
    """Build the finite-automaton content model for SignatureInformationType.

    Content model (from the counters below, each min=0/max=1):
    ID, then ReferencedSignatureID, then ds:Signature — in that order,
    each occurring at most once.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton
    del _BuildAutomaton
    import pyxb.utils.fac as fac

    # Occurrence counters: one per optional child element.
    counters = set()
    cc_0 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/Users/radu/Projects/esens/edocument/profiles/e_confirmation/xsd/common/UBL-SignatureAggregateComponents-2.1.xsd', 55, 7))
    counters.add(cc_0)
    cc_1 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/Users/radu/Projects/esens/edocument/profiles/e_confirmation/xsd/common/UBL-SignatureAggregateComponents-2.1.xsd', 62, 7))
    counters.add(cc_1)
    cc_2 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('/Users/radu/Projects/esens/edocument/profiles/e_confirmation/xsd/common/UBL-SignatureAggregateComponents-2.1.xsd', 70, 7))
    counters.add(cc_2)

    # One automaton state per element; all are initial (every child optional).
    states = []
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_0, False))
    symbol = pyxb.binding.content.ElementUse(SignatureInformationType._UseForTag(pyxb.namespace.ExpandedName(_Namespace_cbc, 'ID')), pyxb.utils.utility.Location('/Users/radu/Projects/esens/edocument/profiles/e_confirmation/xsd/common/UBL-SignatureAggregateComponents-2.1.xsd', 55, 7))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_1, False))
    symbol = pyxb.binding.content.ElementUse(SignatureInformationType._UseForTag(pyxb.namespace.ExpandedName(_Namespace_sbc, 'ReferencedSignatureID')), pyxb.utils.utility.Location('/Users/radu/Projects/esens/edocument/profiles/e_confirmation/xsd/common/UBL-SignatureAggregateComponents-2.1.xsd', 62, 7))
    st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_1)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_2, False))
    symbol = pyxb.binding.content.ElementUse(SignatureInformationType._UseForTag(pyxb.namespace.ExpandedName(_Namespace_ds, 'Signature')), pyxb.utils.utility.Location('/Users/radu/Projects/esens/edocument/profiles/e_confirmation/xsd/common/UBL-SignatureAggregateComponents-2.1.xsd', 70, 7))
    st_2 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_2)

    # Transitions enforce the element order ID -> ReferencedSignatureID -> Signature.
    transitions = []
    transitions.append(fac.Transition(st_0, [
        fac.UpdateInstruction(cc_0, True) ]))
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_0, False) ]))
    st_0._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_1, True) ]))
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_1, False) ]))
    st_1._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_2, True) ]))
    st_2._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
SignatureInformationType._Automaton = _BuildAutomaton()
|
rtfd/readthedocs.org
|
readthedocs/api/v2/views/model_views.py
|
"""Endpoints for listing Projects, Versions, Builds, etc."""
import json
import logging
from allauth.socialaccount.models import SocialAccount
from django.conf import settings
from django.db.models import BooleanField, Case, Value, When
from django.shortcuts import get_object_or_404
from django.template.loader import render_to_string
from rest_framework import decorators, permissions, status, viewsets
from rest_framework.parsers import JSONParser, MultiPartParser
from rest_framework.renderers import BaseRenderer, JSONRenderer
from rest_framework.response import Response
from readthedocs.builds.constants import INTERNAL
from readthedocs.builds.models import Build, BuildCommandResult, Version
from readthedocs.oauth.models import RemoteOrganization, RemoteRepository
from readthedocs.oauth.services import GitHubService, registry
from readthedocs.projects.models import Domain, Project
from readthedocs.storage import build_commands_storage
from ..permissions import APIPermission, APIRestrictedPermission, IsOwner
from ..serializers import (
BuildAdminSerializer,
BuildCommandSerializer,
BuildSerializer,
DomainSerializer,
ProjectAdminSerializer,
ProjectSerializer,
RemoteOrganizationSerializer,
RemoteRepositorySerializer,
SocialAccountSerializer,
VersionAdminSerializer,
VersionSerializer,
)
from ..utils import (
ProjectPagination,
RemoteOrganizationPagination,
RemoteProjectPagination,
)
log = logging.getLogger(__name__)
class PlainTextBuildRenderer(BaseRenderer):

    """
    Custom renderer for text/plain format.

    charset is 'utf-8' by default.
    """

    media_type = 'text/plain'
    format = 'txt'

    def render(self, data, accepted_media_type=None, renderer_context=None):
        """Render a build as plain text via the ``restapi/log.txt`` template."""
        context = renderer_context or {}
        response = context.get('response')
        # Error responses (or a missing response) only carry a detail string.
        if not response or response.exception:
            return data.get('detail', '').encode(self.charset)
        rendered = render_to_string(
            'restapi/log.txt',
            {'build': data},
        )
        return rendered.encode(self.charset)
class DisableListEndpoint:

    """
    Helper to disable APIv2 listing endpoint.

    We are disabling the listing endpoint because it could cause DOS without
    using any type of filtering.

    This class disables these endpoints except:

    - version resource when passing ``?project__slug=``
    - build resource when using ``?commit=`` or ``?project__slug=``
    - project resource when using ``?slug=``

    All the other type of listings are disabled and return 410 GONE with an
    error message pointing the user to APIv3.
    """

    def list(self, *args, **kwargs):
        """Return the regular listing, or 410 GONE when listing is disabled."""
        # Using private repos will list resources the user has access to.
        if settings.ALLOW_PRIVATE_REPOS:
            return super().list(*args, **kwargs)

        disabled = True
        # NOTE: keep list endpoint that specifies a resource
        if any([
            self.basename == 'version' and 'project__slug' in self.request.GET,
            self.basename == 'build'
            and ('commit' in self.request.GET or 'project__slug' in self.request.GET),
            self.basename == 'project' and 'slug' in self.request.GET,
        ]):
            disabled = False

        if not disabled:
            return super().list(*args, **kwargs)

        return Response(
            {
                'error': 'disabled',
                'msg': (
                    'List endpoint have been disabled due to heavy resource usage. '
                    'Take into account that APIv2 is planned to be deprecated soon. '
                    'Please use APIv3: https://docs.readthedocs.io/page/api/v3.html'
                )
            },
            status=status.HTTP_410_GONE,
        )
class UserSelectViewSet(viewsets.ModelViewSet):

    """
    View set that varies serializer class based on request user credentials.

    Viewsets using this class should have an attribute `admin_serializer_class`,
    which is a serializer that might have more fields that only admin/staff
    users require. If the user is staff, this class will be returned instead.
    """

    def get_serializer_class(self):
        """Return the admin serializer for staff users when one is declared."""
        try:
            is_staff_user = self.request.user.is_staff
            admin_class = self.admin_serializer_class
        except AttributeError:
            # No admin serializer declared (or no usable user on the request).
            return self.serializer_class
        if is_staff_user and admin_class is not None:
            return admin_class
        return self.serializer_class

    def get_queryset(self):
        """Use our API manager method to determine authorization on queryset."""
        return self.model.objects.api(self.request.user)
class ProjectViewSet(DisableListEndpoint, UserSelectViewSet):

    """List, filter, etc, Projects."""

    permission_classes = [APIPermission]
    renderer_classes = (JSONRenderer,)
    serializer_class = ProjectSerializer
    admin_serializer_class = ProjectAdminSerializer
    model = Project
    pagination_class = ProjectPagination
    filterset_fields = ('slug',)

    @decorators.action(detail=True)
    def translations(self, *_, **__):
        """List all translations of this project."""
        translations = self.get_object().translations.all()
        return Response({
            'translations': ProjectSerializer(translations, many=True).data,
        })

    @decorators.action(detail=True)
    def subprojects(self, request, **kwargs):
        """List the child projects (subprojects) of this project."""
        project = get_object_or_404(
            Project.objects.api(request.user),
            pk=kwargs['pk'],
        )
        rels = project.subprojects.all()
        children = [rel.child for rel in rels]
        return Response({
            'subprojects': ProjectSerializer(children, many=True).data,
        })

    @decorators.action(detail=True)
    def active_versions(self, request, **kwargs):
        """List the project's active internal versions."""
        project = get_object_or_404(
            Project.objects.api(request.user),
            pk=kwargs['pk'],
        )
        versions = project.versions(manager=INTERNAL).filter(active=True)
        return Response({
            'versions': VersionSerializer(versions, many=True).data,
        })

    @decorators.action(
        detail=True,
        permission_classes=[permissions.IsAdminUser],
    )
    def token(self, request, **kwargs):
        """Return the GitHub token for this project (admin users only)."""
        project = get_object_or_404(
            Project.objects.api(request.user),
            pk=kwargs['pk'],
        )
        token = GitHubService.get_token_for_project(project, force_local=True)
        return Response({
            'token': token,
        })

    @decorators.action(detail=True)
    def canonical_url(self, request, **kwargs):
        """Return the canonical documentation URL for this project."""
        project = get_object_or_404(
            Project.objects.api(request.user),
            pk=kwargs['pk'],
        )
        return Response({
            'url': project.get_docs_url(),
        })
class VersionViewSet(DisableListEndpoint, UserSelectViewSet):

    """List and retrieve Versions, filterable by project slug and active flag."""

    permission_classes = [APIRestrictedPermission]
    renderer_classes = (JSONRenderer,)
    serializer_class = VersionSerializer
    admin_serializer_class = VersionAdminSerializer
    model = Version
    filterset_fields = (
        'active',
        'project__slug',
    )
class BuildViewSet(DisableListEndpoint, UserSelectViewSet):

    """List and retrieve Builds; supports JSON and plain-text rendering."""

    permission_classes = [APIRestrictedPermission]
    renderer_classes = (JSONRenderer, PlainTextBuildRenderer)
    serializer_class = BuildSerializer
    admin_serializer_class = BuildAdminSerializer
    model = Build
    filterset_fields = ('project__slug', 'commit')

    @decorators.action(
        detail=False,
        permission_classes=[permissions.IsAdminUser],
        methods=['get'],
    )
    def concurrent(self, request, **kwargs):
        """Return the build-concurrency state for the project in ``?project__slug=``."""
        project_slug = request.GET.get('project__slug')
        project = get_object_or_404(Project, slug=project_slug)
        limit_reached, concurrent, max_concurrent = Build.objects.concurrent(project)
        data = {
            'limit_reached': limit_reached,
            'concurrent': concurrent,
            'max_concurrent': max_concurrent,
        }
        return Response(data)

    def retrieve(self, *args, **kwargs):
        """
        Retrieves command data from storage.

        This uses files from storage to get the JSON,
        and replaces the ``commands`` part of the response data.
        """
        if not settings.RTD_SAVE_BUILD_COMMANDS_TO_STORAGE:
            return super().retrieve(*args, **kwargs)

        instance = self.get_object()
        serializer = self.get_serializer(instance)
        data = serializer.data
        if instance.cold_storage:
            # Cold-stored builds keep their command logs in object storage,
            # keyed by build date and id.
            storage_path = '{date}/{id}.json'.format(
                date=str(instance.date.date()),
                id=instance.id,
            )
            if build_commands_storage.exists(storage_path):
                try:
                    json_resp = build_commands_storage.open(storage_path).read()
                    data['commands'] = json.loads(json_resp)
                except Exception:
                    # Best effort: serve the build without commands rather
                    # than failing the whole request.
                    log.exception(
                        'Failed to read build data from storage. path=%s.',
                        storage_path,
                    )
        return Response(data)

    @decorators.action(
        detail=True,
        permission_classes=[permissions.IsAdminUser],
        methods=['post'],
    )
    def reset(self, request, **kwargs):
        """Reset the build so it can be re-used when re-trying."""
        instance = self.get_object()
        instance.reset()
        return Response(status=status.HTTP_204_NO_CONTENT)
class BuildCommandViewSet(DisableListEndpoint, UserSelectViewSet):

    """API endpoint for individual build command results."""

    parser_classes = [JSONParser, MultiPartParser]
    permission_classes = [APIRestrictedPermission]
    renderer_classes = (JSONRenderer,)
    serializer_class = BuildCommandSerializer
    model = BuildCommandResult
class DomainViewSet(DisableListEndpoint, UserSelectViewSet):

    """API endpoint for project custom domains."""

    permission_classes = [APIRestrictedPermission]
    renderer_classes = (JSONRenderer,)
    serializer_class = DomainSerializer
    model = Domain
class RemoteOrganizationViewSet(viewsets.ReadOnlyModelViewSet):

    """Read-only API endpoint for the user's remote VCS organizations."""

    permission_classes = [IsOwner]
    renderer_classes = (JSONRenderer,)
    serializer_class = RemoteOrganizationSerializer
    model = RemoteOrganization
    pagination_class = RemoteOrganizationPagination

    def get_queryset(self):
        """Return organizations limited to currently registered OAuth providers."""
        return (
            self.model.objects.api(self.request.user).filter(
                remote_organization_relations__account__provider__in=[
                    service.adapter.provider_id for service in registry
                ]
            ).distinct()
        )
class RemoteRepositoryViewSet(viewsets.ReadOnlyModelViewSet):

    """Read-only API endpoint for the user's remote VCS repositories."""

    permission_classes = [IsOwner]
    renderer_classes = (JSONRenderer,)
    serializer_class = RemoteRepositorySerializer
    model = RemoteRepository
    pagination_class = RemoteProjectPagination

    def get_queryset(self):
        """Return the user's repositories, annotated and filtered by query params.

        Supported query params: ``full_name`` (substring match), ``org``
        (organization pk) and ``own`` (provider id, repos outside any org).
        """
        if not self.request.user.is_authenticated:
            return self.model.objects.none()
        # TODO: Optimize this query after deployment
        # Annotate each repository with whether the requesting user is an
        # admin on it, so the serializer can expose that as a boolean.
        query = self.model.objects.api(self.request.user).annotate(
            admin=Case(
                When(
                    remote_repository_relations__user=self.request.user,
                    remote_repository_relations__admin=True,
                    then=Value(True)
                ),
                default=Value(False),
                output_field=BooleanField()
            )
        )
        full_name = self.request.query_params.get('full_name')
        if full_name is not None:
            query = query.filter(full_name__icontains=full_name)
        org = self.request.query_params.get('org', None)
        if org is not None:
            query = query.filter(organization__pk=org)
        own = self.request.query_params.get('own', None)
        if own is not None:
            query = query.filter(
                remote_repository_relations__account__provider=own,
                organization=None,
            )
        # Only show repositories from currently registered OAuth providers.
        query = query.filter(
            remote_repository_relations__account__provider__in=[
                service.adapter.provider_id for service in registry
            ],
        ).distinct()
        # optimizes for the RemoteOrganizationSerializer
        query = query.select_related('organization').order_by(
            'organization__name', 'full_name'
        )
        return query
class SocialAccountViewSet(viewsets.ReadOnlyModelViewSet):

    """Read-only API endpoint for the requesting user's social accounts."""

    permission_classes = [IsOwner]
    renderer_classes = (JSONRenderer,)
    serializer_class = SocialAccountSerializer
    model = SocialAccount

    def get_queryset(self):
        """Return only the social accounts belonging to the requesting user."""
        return self.model.objects.filter(user=self.request.user.pk)
|
wurstfabrik/wurst-cli
|
wurstc/cli/utils.py
|
# -- encoding: UTF-8 --
import sys
from click import echo
from colorama import Fore, Style
can_use_real_emoji = (
sys.stdout.isatty() and
sys.platform != "win32"
)
def success(msg):
    """Echo *msg* in bright green, prefixed with an OK-hand emoji when the
    terminal can render it, or an ASCII marker otherwise."""
    sign = "\U0001F44C" if can_use_real_emoji else "[+] "
    echo(Fore.GREEN + Style.BRIGHT + sign + msg)
def get_site_from_context(ctx):
    """Return the site configured on the click context's wurst project.

    Returns ``None`` when no project is attached to the context (no
    ``wurst.project`` key in ``ctx.meta``) or the project's site is falsy.
    The original version fell through the ``except`` and returned ``None``
    implicitly; the return is now explicit.
    """
    try:
        return ctx.meta["wurst.project"].site or None
    except KeyError:
        return None
|
WaltonSimons/PhoneBot
|
smsd.py
|
import gammu.smsd
import thread
class smsd(object):
    """Starts gammu in another thread so the bot can interpret incoming sms"""

    def __init__(self, configpath):
        """Create the SMS daemon from a gammu-smsd config file at *configpath*."""
        self.sms = gammu.smsd.SMSD(configpath)
        self.thread = None

    def start(self):
        """Run the daemon main loop in a background thread."""
        self.thread = thread.start_new_thread(self.sms.MainLoop, ())

    def stop(self):
        """Ask the daemon main loop to shut down."""
        self.sms.Shutdown()

    def inject_sms(self, phonenumber, message):
        """Queue *message* for sending to *phonenumber* via the daemon.

        Long texts are encoded as a concatenated (multi-part) unicode SMS.
        """
        smsinfo = {
            'Class': -1,
            'Unicode': True,
            'Entries': [
                {
                    'ID': 'ConcatenatedTextLong',
                    'Buffer': message
                }
            ]}
        # Fix: python-gammu's encoder is gammu.EncodeSMS; gammu.encode does
        # not exist and would raise AttributeError at runtime.
        encoded = gammu.EncodeSMS(smsinfo)
        # Renamed the loop variable, which previously shadowed the
        # `message` parameter.
        for part in encoded:
            part['SMSC'] = {'Location': 1}
            part['Number'] = phonenumber
        # InjectSMS expects the full list of parts of one logical message.
        self.sms.InjectSMS(encoded)
|
doriancoins/doriancoin
|
test/functional/interface_zmq.py
|
#!/usr/bin/env python3
# Copyright (c) 2015-2017 The Doriancoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the ZMQ notification interface."""
import configparser
import os
import struct
from test_framework.test_framework import DoriancoinTestFramework, SkipTest
from test_framework.mininode import CTransaction
from test_framework.util import (assert_equal,
bytes_to_hex_str,
hash256,
)
from io import BytesIO
class ZMQSubscriber:
    """SUB-socket helper: subscribes to one topic and validates message order."""

    def __init__(self, socket, topic):
        import zmq

        self.sequence = 0
        self.socket = socket
        self.topic = topic
        self.socket.setsockopt(zmq.SUBSCRIBE, self.topic)

    def receive(self):
        """Receive one notification and return its body.

        Checks that the topic frame matches this subscriber's topic and that
        the 32-bit little-endian sequence frame increments by one per message.
        """
        received_topic, body, raw_seq = self.socket.recv_multipart()
        assert_equal(received_topic, self.topic)
        (seq_number,) = struct.unpack('<I', raw_seq)
        assert_equal(seq_number, self.sequence)
        self.sequence += 1
        return body
class ZMQTest (DoriancoinTestFramework):
    """Functional test for the node's ZMQ notification interface."""

    def set_test_params(self):
        self.num_nodes = 2

    def setup_nodes(self):
        # Try to import python3-zmq. Skip this test if the import fails.
        try:
            import zmq
        except ImportError:
            raise SkipTest("python3-zmq module not available.")

        # Check that doriancoin has been built with ZMQ enabled.
        config = configparser.ConfigParser()
        if not self.options.configfile:
            self.options.configfile = os.path.abspath(os.path.join(os.path.dirname(__file__), "../config.ini"))
        config.read_file(open(self.options.configfile))

        if not config["components"].getboolean("ENABLE_ZMQ"):
            raise SkipTest("doriancoind has not been built with zmq enabled.")

        # Initialize ZMQ context and socket.
        # All messages are received in the same socket which means
        # that this test fails if the publishing order changes.
        # Note that the publishing order is not defined in the documentation and
        # is subject to change.
        address = "tcp://127.0.0.1:28332"
        self.zmq_context = zmq.Context()
        socket = self.zmq_context.socket(zmq.SUB)
        socket.set(zmq.RCVTIMEO, 60000)
        socket.connect(address)

        # Subscribe to all available topics.
        self.hashblock = ZMQSubscriber(socket, b"hashblock")
        self.hashtx = ZMQSubscriber(socket, b"hashtx")
        self.rawblock = ZMQSubscriber(socket, b"rawblock")
        self.rawtx = ZMQSubscriber(socket, b"rawtx")

        # Only node0 publishes; node1 is used to verify propagation.
        self.extra_args = [["-zmqpub%s=%s" % (sub.topic.decode(), address) for sub in [self.hashblock, self.hashtx, self.rawblock, self.rawtx]], []]
        self.add_nodes(self.num_nodes, self.extra_args)
        self.start_nodes()

    def run_test(self):
        try:
            self._zmq_test()
        finally:
            # Destroy the ZMQ context.
            self.log.debug("Destroying ZMQ context")
            self.zmq_context.destroy(linger=None)

    def _zmq_test(self):
        """Generate blocks and a wallet tx; check each ZMQ topic fires correctly."""
        num_blocks = 5
        self.log.info("Generate %(n)d blocks (and %(n)d coinbase txes)" % {"n": num_blocks})
        genhashes = self.nodes[0].generate(num_blocks)
        self.sync_all()

        for x in range(num_blocks):
            # Should receive the coinbase txid.
            txid = self.hashtx.receive()

            # Should receive the coinbase raw transaction.
            hex = self.rawtx.receive()
            tx = CTransaction()
            tx.deserialize(BytesIO(hex))
            tx.calc_sha256()
            assert_equal(tx.hash, bytes_to_hex_str(txid))

            # Should receive the generated block hash.
            hash = bytes_to_hex_str(self.hashblock.receive())
            assert_equal(genhashes[x], hash)
            # The block should only have the coinbase txid.
            assert_equal([bytes_to_hex_str(txid)], self.nodes[1].getblock(hash)["tx"])

            # Should receive the generated raw block.
            block = self.rawblock.receive()
            # The block hash is the double-SHA256 of the 80-byte header.
            assert_equal(genhashes[x], bytes_to_hex_str(hash256(block[:80])))

        self.log.info("Wait for tx from second node")
        payment_txid = self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1.0)
        self.sync_all()

        # Should receive the broadcasted txid.
        txid = self.hashtx.receive()
        assert_equal(payment_txid, bytes_to_hex_str(txid))

        # Should receive the broadcasted raw transaction.
        hex = self.rawtx.receive()
        # A txid is the double-SHA256 of the serialized transaction.
        assert_equal(payment_txid, bytes_to_hex_str(hash256(hex)))

if __name__ == '__main__':
    ZMQTest().main()
|
sigmunjr/VirtualPetFence
|
runSegmentation.py
|
import cv2
import skimage.io
import skimage.transform
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
from CatFinder import CatFinder
from CatPlayer import CatPlayer
from DrawGui import DrawArea
def load_image(path, size=224):
    """Load an image, center-crop it to a square and resize to (size, size)."""
    img = skimage.io.imread(path)
    # Center crop to the shortest edge so the resize keeps the aspect ratio.
    edge = min(img.shape[:2])
    top = int((img.shape[0] - edge) / 2)
    left = int((img.shape[1] - edge) / 2)
    square = img[top:top + edge, left:left + edge]
    return skimage.transform.resize(square, (size, size))
#img = load_image('/usr/local/data/ludde/DSC_0065.jpg')
#img = np.tile(img, (32, 1, 1, 1))
# Segmentation threshold in [0, 1]; updated live by the 'alpha' trackbar.
alpha = 0.6
# Let the user draw the "fence" region; `drawing` is a boolean mask over the frame.
draw = DrawArea()
drawing = draw.doDraw()
def nothing(x):
    """Trackbar callback: map the 0-100 slider value to the global alpha in [0, 1]."""
    global alpha
    alpha = x/100.
catPlayer = CatPlayer()
cv2.namedWindow('cam')
cv2.createTrackbar('alpha','cam',50,100,nothing)
# Capture from the default webcam.
cap = cv2.VideoCapture(0)
rval, frame = cap.read()
catFinder = CatFinder('/tmp/models/catnet12')
# Coordinate grids over the 224x224 network output, used to compute the
# probability-weighted centroid of the detection.
xrange = np.arange(224)  # NOTE: shadows the Python 2 builtin xrange
xv, yv = np.meshgrid(xrange, xrange)
# Scale factors from network resolution (224) back to camera resolution.
up_scale_y = 640/224.
up_scale_x = 480/224.
# Ring buffer of the last 100 frames (see saveImages).
cat_list = range(100)
cnt = 0
def saveImages(img_list, name):
    """Write every frame in *img_list* to disk as ``<name>_<index>.jpg``.

    Fix: the original ignored the *name* parameter and always used a
    hard-coded ``cat_`` prefix.
    """
    for i, img in enumerate(img_list):
        cv2.imwrite(name + "_" + str(i) + ".jpg", img)
# Main capture loop (Python 2 syntax: print statements).
while rval:
    print 'alpha:', alpha
    # Resize to network input, normalize to [0, 1] and flip BGR -> RGB.
    img = cv2.resize(frame, (224, 224))
    img = img.astype(np.float32)/255
    img = img[np.newaxis, :, :, ::-1]
    # Overlay the user-drawn fence region in red.
    frame[drawing] = (0, 0, 255)
    segmentedCat = np.squeeze(catFinder.getProbability(img))# > alpha
    # Threshold the upscaled probability map and tint detected pixels.
    segmentedCatBig = cv2.resize(segmentedCat, (640, 480)) > alpha
    frame[segmentedCatBig] = 0.5*frame[segmentedCatBig] + frame[segmentedCatBig]*np.array([0.5, 0, 0])
    if segmentedCat.max() > 0.90:
        print 'IS CAT'
        # Probability-weighted centroid of the detection, scaled to frame size.
        prod_dist = segmentedCat/segmentedCat.sum()
        centerx = np.sum(prod_dist*xv)*up_scale_x
        centery = np.sum(prod_dist*yv)*up_scale_y
        cv2.circle(frame, (int(centery), int(centerx)), 4, (0, 255, 0))
        # NOTE(review): indexing a mask with float coordinates fails on newer
        # NumPy (needs int()), and the (centerx, centery) order here looks
        # swapped relative to the circle drawn above — confirm intent.
        if drawing[centerx, centery]:
            catPlayer.play()
        else:
            catPlayer.pause()
    #FOR VIDEO
    cat_list[cnt%100] = frame
    cnt += 1
    cv2.imshow('cam', frame)
    rval, frame = cap.read()
    key = cv2.waitKey(1)
    if key == 27:
        break
# After the loop: show the last segmentation result with matplotlib.
segmentedCat = catFinder(img)
seg = np.zeros((224, 224, 3))
seg[:, :, 2] = segmentedCat
plt.imshow(0.5*seg + 0.5*img[0])
print segmentedCat
|
pisskidney/leetcode
|
medium/406.py
|
#!/usr/bin/python
class Solution(object):
    """Queue Reconstruction by Height (LeetCode 406) — greedy O(n^2) insertion."""

    def reconstructQueue(self, people):
        """Reconstruct the queue from ``[height, k]`` pairs.

        :type people: List[List[int]]
        :rtype: List[List[int]]

        Sort tallest-first (ties: smaller k first); inserting each person at
        index ``k`` is then correct because everyone already placed is at
        least as tall, so exactly ``k`` of them end up in front.
        """
        # One composite-key sort replaces the original two stable sorts
        # (by k, then by -height); Python-2-only debug prints removed.
        ordered = sorted(people, key=lambda p: (-p[0], p[1]))
        res = []
        for p in ordered:
            res.insert(p[1], p)
        return res
class SolutionDumb(object):
    """Brute-force reference solution for queue reconstruction (O(n^3))."""

    def reconstructQueue(self, people):
        """
        :type people: List[List[int]]
        :rtype: List[List[int]]

        Repeatedly place the person whose k-value matches the number of
        already-placed people at least as tall.
        """
        res = []
        # Shortest-first makes the "taller or equal already placed" count easy.
        people = sorted(people, key = lambda x: x[0])
        # range() instead of Python-2-only xrange(); identical iteration on
        # both Python 2 and 3.
        for i in range(len(people)):
            found = False
            j = i
            while not found and j >= 0:
                for p in people:
                    if p[1] == j:
                        taller = 0
                        for r in res:
                            if r[0] >= p[0]:
                                taller += 1
                        if taller == p[1]:
                            people.remove(p)
                            res.append(p)
                            found = True
                            break
                j -= 1
        return res
# Demo: reconstruct the canonical LeetCode 406 example (Python 2 print syntax).
s = Solution()
x = [[7,0], [4,4], [7,1], [5,0], [6,1], [5,2]]
print s.reconstructQueue(x)
|
kjellmf/blend2tikz
|
tikz_export.py
|
#!BPY
"""
Name: 'TikZ (.tex)...'
Blender: 245
Group: 'Export'
Tooltip: 'Export selected curves as TikZ paths for use with (La)TeX'
"""
__author__ = 'Kjell Magne Fauske'
__version__ = "1.0"
__url__ = ("Documentation, http://www.fauskes.net/code/blend2tikz/documentation/",
"Author's homepage, http://www.fauskes.net/",
"TikZ examples, http://www.fauskes.net/pgftikzexamples/")
__bpydoc__ = """\
This script exports selected curves and empties to TikZ format for use with TeX.
PGF and TikZ is a powerful macro package for creating high quality illustrations
and graphics for use with (La|Con)TeX.
Important: TikZ is primarily for creating 2D illustrations. This script will
therefore only export the X and Y coordinates. However, the Z coordinate is used
to determine draw order.
Usage:
Select the objects you want to export and invoke the script from the
"File->Export" menu[1]. Alternatively you can load and run the script from
inside Blender.
A dialog box will pop up with various options:<br>
- Draw: Insert a draw operation in the generated path.<br>
- Fill: Insert a fill operation in the generated path.<br>
- Transform: Apply translation and scale transformations.<br>
- Materials: Export materials assigned to curves.<br>
- Empties: Export empties as named coordinates.<br>
- Only properties: Use on the style property of materials if set.<br>
- Standalone: Create a standalone document.<br>
- Only code: Generate only code for drawing paths.<br>
- Clipboard: Copy generated code to the clipboard. <br>
Properties:
If an object is assigned a ID property or game property named 'style' of type
string, its value will be added to the path as an option. You can use the
Help->Property ID browser to set this value, or use the Logic panel to
add a game property.
Materials:
The exporter has basic support for materials. By default the material's RGB
value is used as fill or draw color. You can also set the alpha value for
transparency effects.
An alternative is to specify style options
directly by putting the values in a 'style' property assigned to the material.
You can use the Help->Property ID browser to set this value.
Issues:<br>
- Only bezier and polyline curves are supported.<br>
- A full Python install is required for clipboard support on Windows. Other platforms
need the standard subprocess module (requires Python 2.4 or later). Additionally:<br>
* Windows users need to install the PyWin32 module.<br>
* Unix-like users need the xclip command line tool or the PyGTK_ module installed.<br>
* OS X users need the pbcopy command line tool installed.<br>
[1] Requires you to put the script in Blender's scripts folder. Blender will
then automatically detect the script.
"""
import Blender
from Blender import sys as bsys
from itertools import izip
import itertools, math
from Blender import Mesh, Mathutils, Registry, Scene, Material, Group
from textwrap import wrap
from string import Template
# Curve types
TYPE_POLY = 0
TYPE_BEZIER = 1
TYPE_NURBS = 4
R2D = 180.0 / math.pi
# Start of configuration section -------
# Templates
standalone_template = r"""
\documentclass{article}
\usepackage{tikz}
%(preamble)s
%(materials)s
\begin{document}
\begin{tikzpicture}
%(pathcode)s
\end{tikzpicture}
\end{document}
"""
fig_template = r"""
%(materials)s
\begin{tikzpicture}
%(pathcode)s
\end{tikzpicture}
"""
REG_KEY = 'tikz_export'
# config options:
STANDALONE = True
CODE_ONLY = False
DRAW_CURVE = True
FILL_CLOSED_CURVE = True
TRANSFORM_CURVE = True
CLIPBOARD_OUTPUT = False
EMPTIES = True
EXPORT_MATERIALS = False
ONLY_PROPERTIES = False
USE_PLOTPATH = False
WRAP_LINES = True
tooltips = {
'STANDALONE': 'Output a standalone document',
'DRAW_CURVE':
'Draw curves',
'FILL_CLOSED_CURVE':
'Fill closed curves',
'TRANSFORM_CURVE':
'Apply transformations',
'CLIPBOARD_OUTPUT':
'Put generated code on clipboard',
'CODE_ONLY':
'Output pathcode only',
'EMPTIES': 'Export empties',
'EXPORT_MATERIALS': 'Apply materials to curves',
'ONLY_PROPERTIES':
'Use only properties for materials with the style property set',
'USE_PLOTPATH':
'Use the plot path operations for polylines',
'WRAP_LINES':
'Wrap long lines',
}
def update_registry():
    """Persist the current option values to Blender's registry (cached on disk)."""
    settings = dict(
        STANDALONE=STANDALONE,
        DRAW_CURVE=DRAW_CURVE,
        FILL_CLOSED_CURVE=FILL_CLOSED_CURVE,
        TRANSFORM_CURVE=TRANSFORM_CURVE,
        CLIPBOARD_OUTPUT=CLIPBOARD_OUTPUT,
        CODE_ONLY=CODE_ONLY,
        EMPTIES=EMPTIES,
        EXPORT_MATERIALS=EXPORT_MATERIALS,
        ONLY_PROPERTIES=ONLY_PROPERTIES,
        USE_PLOTPATH=USE_PLOTPATH,
        WRAP_LINES=WRAP_LINES,
    )
    # Third argument True -> also write the key to disk, not just memory.
    Registry.SetKey(REG_KEY, settings, True)
# Looking for a saved key in Blender.Registry dict:
# Restore previously saved options; if the key is missing or incomplete
# (older script version), rewrite it with the current defaults.
rd = Registry.GetKey(REG_KEY, True)
if rd:
    try:
        STANDALONE = rd['STANDALONE']
        DRAW_CURVE = rd['DRAW_CURVE']
        FILL_CLOSED_CURVE = rd['FILL_CLOSED_CURVE']
        TRANSFORM_CURVE = rd['TRANSFORM_CURVE']
        CLIPBOARD_OUTPUT = rd['CLIPBOARD_OUTPUT']
        CODE_ONLY = rd['CODE_ONLY']
        EMPTIES = rd['EMPTIES']
        EXPORT_MATERIALS = rd['EXPORT_MATERIALS']
        ONLY_PROPERTIES = rd['ONLY_PROPERTIES']
        USE_PLOTPATH = rd['USE_PLOTPATH']
        WRAP_LINES = rd['WRAP_LINES']
    except KeyError:
        # Stale registry entry lacks one of the options: reset it.
        print "Keyerror"
        update_registry()
else:
    # First run: persist the defaults.
    print "update registry"
    update_registry()
# Start of GUI section ------------------------------------------------
from Blender import Draw
def draw_GUI():
    """Show the options popup and sync the module-level option globals.

    Returns the Draw.PupBlock return value (falsy when the user cancelled).
    On confirmation the chosen values are copied back into the globals and
    persisted via update_registry().
    """
    global STANDALONE, DRAW_CURVE, FILL_CLOSED_CURVE, TRANSFORM_CURVE
    global CLIPBOARD_OUTPUT, CODE_ONLY, EMPTIES, EXPORT_MATERIALS
    global ONLY_PROPERTIES
    global USE_PLOTPATH
    global WRAP_LINES
    # One toggle widget per option, seeded with the current value.
    standalonetog = Draw.Create(STANDALONE)
    codeonlytog = Draw.Create(CODE_ONLY)
    drawcurvetog = Draw.Create(DRAW_CURVE)
    fillcurvetog = Draw.Create(FILL_CLOSED_CURVE)
    transformcurvetog = Draw.Create(TRANSFORM_CURVE)
    clipboardtog = Draw.Create(CLIPBOARD_OUTPUT)
    emptiestog = Draw.Create(EMPTIES)
    materialstog = Draw.Create(EXPORT_MATERIALS)
    onlyproptog = Draw.Create(ONLY_PROPERTIES)
    useplotpathtog = Draw.Create(USE_PLOTPATH)
    wraplinestog = Draw.Create(WRAP_LINES)
    # Plain strings become section headings in the popup; tuples are toggles.
    block = []
    #block.append("Export:")
    block.append(("Draw", drawcurvetog, tooltips['DRAW_CURVE']))
    block.append(("Fill", fillcurvetog, tooltips['FILL_CLOSED_CURVE']))
    block.append(("Transform", transformcurvetog, tooltips['TRANSFORM_CURVE']))
    block.append(("Use plot path", useplotpathtog, tooltips['USE_PLOTPATH']))
    block.append("Export:")
    block.append(("Materials", materialstog, tooltips['EXPORT_MATERIALS']))
    block.append(("Empties", emptiestog, tooltips['EMPTIES']))
    block.append("Material options:")
    block.append(("Only properties", onlyproptog, tooltips['ONLY_PROPERTIES']))
    # NOTE(review): "Ouput" typo below is user-visible in the popup heading.
    block.append('Ouput options')
    block.append(("Standalone", standalonetog, tooltips['STANDALONE']))
    block.append(("Only code", codeonlytog, tooltips['CODE_ONLY']))
    block.append(("Clipboard", clipboardtog, tooltips['CLIPBOARD_OUTPUT']))
    block.append(("Wrap lines", wraplinestog, tooltips['WRAP_LINES']))
    retval = Blender.Draw.PupBlock("Blend2TikZ options", block)
    if retval:
        # set options: copy widget values back into the globals.
        STANDALONE = standalonetog.val
        DRAW_CURVE = drawcurvetog.val
        FILL_CLOSED_CURVE = fillcurvetog.val
        TRANSFORM_CURVE = transformcurvetog.val
        CLIPBOARD_OUTPUT = clipboardtog.val
        CODE_ONLY = codeonlytog.val
        EMPTIES = emptiestog.val
        EXPORT_MATERIALS = materialstog.val
        ONLY_PROPERTIES = onlyproptog.val
        USE_PLOTPATH = useplotpathtog.val
        WRAP_LINES = wraplinestog.val
        update_registry()
    return retval
# End of GUI section ----------------------
# End of configuration section ---------
# Indices of the x/y components in Blender point/vector sequences.
X = 0
Y = 1
# TikZ-safe material name -> Blender material; filled by get_material()
# during export and consumed by write_materials().
used_materials = {}
# Utility functions
def nsplit(seq, n=2):
    """Split a sequence into pieces of length n

    If the length of the sequence isn't a multiple of n, the rest is discarded.
    Note that nsplit will split strings into individual characters.

    Examples:
    >>> nsplit('aabbcc')
    [('a', 'a'), ('b', 'b'), ('c', 'c')]
    >>> nsplit('aabbcc',n=3)
    [('a', 'a', 'b'), ('b', 'c', 'c')]

    # Note that cc is discarded
    >>> nsplit('aabbcc',n=4)
    [('a', 'a', 'b', 'b')]
    """
    # [iter(seq)] * n shares ONE iterator n times, so izip pulls n
    # consecutive items per tuple.
    return [xy for xy in izip(*[iter(seq)] * n)]
def mreplace(s, chararray, newchararray):
    """Replace characters in *s* pairwise: chararray[i] -> newchararray[i]."""
    result = s
    for old_ch, new_ch in zip(chararray, newchararray):
        result = result.replace(old_ch, new_ch)
    return result
def tikzify(s):
    """Make *s* safe for use as a TikZ identifier.

    Maps the problematic characters backslash, ',', ':' and '.' onto
    '-', '+', '_' and '*' respectively; whitespace-only input yields "".
    """
    if not s.strip():
        return ""
    return mreplace(s, r'\,:.', '-+_*')
def copy_to_clipboard(text):
    """Copy *text* to the system clipboard.

    Returns True if successful, False otherwise.
    Works on Windows, *nix and Mac. Tries, in order:
      1. The win32clipboard module from the pywin32 package (Windows).
      2. The xclip command line tool (*nix).
      3. The pbcopy command line tool (Mac OS X).
      4. The xsel command line tool (*nix).
      5. PyGTK's gtk.Clipboard.
    """
    # 1. Windows / pywin32
    try:
        import win32clipboard
        win32clipboard.OpenClipboard()
        win32clipboard.EmptyClipboard()
        win32clipboard.SetClipboardText(text)
        win32clipboard.CloseClipboard()
        return True
    except Exception:  # narrowed from bare except: don't eat KeyboardInterrupt
        pass
    # 2.-4. Pipe the text to the first available command line tool.
    for cmd in (['xclip', '-selection', 'c'], ['pbcopy'], ['xsel']):
        try:
            import subprocess
            p = subprocess.Popen(cmd, stdin=subprocess.PIPE)
            p.stdin.write(text)
            p.stdin.close()
            p.wait()
            return True
        except Exception:
            pass
    # 5. PyGTK fallback.
    try:
        # Code from
        # http://www.vector-seven.com/2007/06/27/
        # passing-data-between-gtk-applications-with-gtkclipboard/
        import pygtk
        pygtk.require('2.0')
        import gtk
        clipboard = gtk.clipboard_get()
        clipboard.set_text(text)
        # make our data available to other applications
        clipboard.store()
        # BUGFIX: the original fell through here and implicitly returned
        # None, so a successful PyGTK copy was reported as a failure.
        return True
    except Exception:
        return False
def get_property(obj, name):
    """Get named object property

    Looks first in the object's ID properties, then in its game properties
    (only non-empty STRING game properties count). Returns a list of the
    values found — possibly empty, possibly with one entry from each source.
    """
    prop_value = []
    try:
        prop_value.append(obj.properties[name])
    except Exception:  # narrowed from bare except: property simply absent
        pass
    try:
        # look for game properties
        prop = obj.getProperty(name)
        if prop.type == "STRING" and prop.data.strip():
            prop_value.append(prop.data)
    except Exception:  # no such game property
        pass
    return prop_value
def get_material(material):
    """Register *material* as used and return its TikZ-safe style name.

    Returns "" for a falsy material. The material is recorded in the
    module-level used_materials dict so write_materials() can emit its
    \\definecolor/\\tikzstyle definitions later.
    """
    if not material:
        return ""
    # Removed unused local `opts` from the original implementation.
    mat_name = tikzify(material.name)
    used_materials[mat_name] = material
    return mat_name
def write_materials(used_materials):
    """Return code for the used materials

    For each used material emits a \\definecolor with its diffuse RGB and a
    \\tikzstyle combining color, opacity and any 'style' custom property.
    """
    c = "% Materials section \n"
    for material in used_materials.values():
        mat_name = tikzify(material.name)
        matopts = ''
        # Per-material override of the global ONLY_PROPERTIES option via an
        # 'onlyproperties' custom property ("0"/"false" strings disable it).
        proponly = ONLY_PROPERTIES
        try:
            proponly = material.properties['onlyproperties']
            if proponly and type(proponly) == str:
                proponly = proponly.lower() not in ('0', 'false')
        except:
            pass
        try:
            matopts = material.properties['style']
        except:
            pass
        rgb = material.rgbCol
        # NOTE(review): spec and flags are read but never used below.
        spec = material.specCol
        alpha = material.alpha
        flags = material.getMode()
        options = []
        if not (proponly and matopts):
            # Emit the color definition unless the style property fully
            # replaces the generated options.
            c += "\\definecolor{%s_col}{rgb}{%s,%s,%s}\n" \
                % tuple([mat_name] + rgb)
            options.append('%s_col' % mat_name)
        if alpha < 1.0:
            options.append('opacity=%s' % alpha)
        if matopts:
            options += [matopts]
        c += "\\tikzstyle{%s}= [%s]\n" % (mat_name, ",".join(options))
    return c
def write_object(obj, empties):
    """Write Curves

    Converts one Blender object to TikZ path code. Curve objects become
    \\path commands (bezier and poly curves only); top-level Empty objects
    become \\coordinate definitions. *empties* maps curve objects to the
    child empties parented to them. Returns the generated code string
    (possibly empty).
    """
    s = ""
    name = obj.name
    prop = obj.properties
    mtrx = obj.matrix.rotationPart()
    x, y, z = obj.getLocation('worldspace')
    rot = obj.getEuler('worldspace')
    scale_x, scale_y, scale_z = obj.matrix.scalePart()
    # Convert to degrees
    rot_z = rot.z * R2D
    if obj.type not in ["Curve", "Empty"]:
        return s
    ps = ""
    if obj.type == 'Curve':
        curvedata = obj.data
        s += "%% %s\n" % name
        for curnurb in curvedata:
            if curnurb.type == TYPE_BEZIER:
                knots = []
                handles = []
                # Build lists of knots and handles
                for point in curnurb:
                    h1, knot, h2 = point.vec
                    handles.extend([h1, h2])
                    knots.append("(+%.4f,+%.4f)" % (knot[X], knot[Y]))
                if curnurb.isCyclic():
                    # The curve is closed.
                    # Move the first handle to the end of the handles list.
                    handles = handles[1:] + [handles[0]]
                    # Repeat the first knot at the end of the knot list
                    knots.append(knots[0])
                else:
                    # We don't need the first and last handles since the curve is
                    # not closed.
                    handles = handles[1:-1]
                # Pair up the handles: one "controls ... and ..." clause per
                # bezier segment.
                hh = []
                for h1, h2 in nsplit(handles, 2):
                    hh.append("controls (+%.4f,+%.4f) and (+%.4f,+%.4f)" \
                        % (h1[X], h1[Y], h2[X], h2[Y]))
                ps += "%s\n" % knots[0]
                for h, k in zip(hh, knots[1:]):
                    ps += " .. %s .. %s\n" % (h, k)
                if curnurb.isCyclic():
                    ps += " -- cycle\n"
            elif curnurb.type == TYPE_POLY:
                coords = ["(+%.4f,+%.4f)" % (point[X], point[Y]) for point in curnurb]
                if USE_PLOTPATH:
                    # Optional per-object 'plotstyle' property becomes the
                    # plot's option list.
                    plotopts = get_property(obj, 'plotstyle')
                    if plotopts:
                        poptstr = "[%s]" % ",".join(plotopts)
                    else:
                        poptstr = ''
                    ps += " plot%s coordinates {%s}" % (poptstr, " ".join(coords))
                    if curnurb.isCyclic():
                        ps += " -- cycle"
                    if WRAP_LINES:
                        ps = "\n".join(wrap(ps, 80, subsequent_indent=" ", break_long_words=False))
                else:
                    if curnurb.isCyclic():
                        coords.extend([coords[0], 'cycle\n '])
                    # Join the coordinates. Could have used "--".join(coords), but
                    # have to add some logic for pretty printing.
                    if WRAP_LINES:
                        # Break the line after every third coordinate.
                        ps += "%s\n " % coords[0]
                        i = 0
                        for c in coords[1:]:
                            i += 1
                            if i % 3:
                                ps += "-- %s" % c
                            else:
                                ps += " -- %s\n " % c
                    else:
                        ps += "%s" % " -- ".join(coords)
            else:
                # Only bezier and poly curves are supported (NURBS skipped).
                continue
        if not ps:
            return s
        # Assemble the \path option list from the global export options.
        options = []
        if DRAW_CURVE:
            options += ['draw']
        if FILL_CLOSED_CURVE:
            if ps.find('cycle') > 0:
                options += ['fill']
        if TRANSFORM_CURVE:
            # Only emit transforms that differ from the identity.
            if x <> 0: options.append('xshift=%.4fcm' % x)
            if y <> 0: options.append('yshift=%.4fcm' % y)
            if rot_z <> 0: options.append('rotate=%.4f' % rot_z)
            if scale_x <> 1: options += ['xscale=%.4f' % scale_x]
            if scale_y <> 1: options += ['yscale=%.4f' % scale_y]
        if EXPORT_MATERIALS:
            try:
                materials = obj.data.getMaterials()
            except:
                materials = []
            if materials:
                # pick first material
                for mat in materials:
                    if mat:
                        matopts = get_material(mat)
                        options.append(matopts)
                        break
        extraopts = get_property(obj, 'style')
        if extraopts:
            options.extend(extraopts)
        optstr = ",".join(options)
        emptstr = ""
        if EMPTIES:
            if obj in empties:
                # Child empties become named coordinates on the path.
                for empty in empties[obj]:
                    # Get correct coordinate relative to the parent
                    if TRANSFORM_CURVE:
                        ex, ey, ez = (empty.mat * (obj.mat.copy()).invert()).translationPart()
                    else:
                        ex, ey, ez = (empty.matrix - obj.matrix).translationPart()
                    emptstr += " (+%.4f,+%.4f) coordinate (%s)\n" \
                        % (ex, ey, empty.name)
        if not WRAP_LINES:
            # Collapse the pretty-printed path onto a single line.
            ps = ' '.join(ps.replace('\n', ' ').split())
        if len(optstr) > 50 or emptstr:
            s += "\\path[%s]\n%s %s;\n" % (optstr, emptstr, ps.rstrip())
        else:
            s += "\\path[%s] %s;\n" % (optstr, ps.rstrip())
    elif obj.type == 'Empty' and EMPTIES and not obj.parent:
        x, y, z = obj.loc
        s += "\\coordinate (%s) at (%.4f,%.4f);\n" % (tikzify(obj.name), x, y)
    return s
def write_objects(filepath):
    """Write all selected objects to filepath

    Generates TikZ code for every selected object (bottom-to-top by world
    z), wraps it in the template selected by the STANDALONE/CODE_ONLY
    options and writes it to *filepath* or to the clipboard.
    """
    def z_comp(a, b):
        # Sort key: world-space z, so lower objects are emitted first
        # (painter's algorithm).
        x, y, z1 = a.getLocation('worldspace')
        x, y, z2 = b.getLocation('worldspace')
        return cmp(z1, z2)
    # get all selected objects
    objects = Blender.Object.GetSelected()
    # get current scene
    scn = Blender.Scene.GetCurrent()
    # iterate over each object
    code = ""
    # Find all empties with parents
    empties_wp = [obj for obj in objects if obj.type == 'Empty' and obj.parent]
    empties_dict = {}
    for empty in empties_wp:
        if empty.parent in empties_dict:
            empties_dict[empty.parent] += [empty]
        else:
            empties_dict[empty.parent] = [empty]
    for obj in sorted(objects, z_comp):
        code += write_object(obj, empties_dict)
    s = ""
    if EXPORT_MATERIALS:
        matcode = write_materials(used_materials)
    else:
        matcode = ""
    # Optional scene-level 'preamble' property is injected into the
    # standalone template.
    try:
        preamblecode = scn.properties['preamble']
    except:
        preamblecode = ''
    templatevars = dict(pathcode=code, preamble=preamblecode, materials=matcode)
    if STANDALONE:
        extra = ""
        try:
            preambleopt = scn.properties['preamble']
            templatevars['preamble'] = str(preambleopt)
        except:
            pass
        template = standalone_template
    elif CODE_ONLY:
        template = "%(pathcode)s"
    else:
        template = fig_template
    s = template % templatevars
    if not CLIPBOARD_OUTPUT:
        # NOTE(review): if file() raises here, f is unbound and the finally
        # clause itself raises NameError — confirm and guard if reworked.
        try:
            f = file(filepath, 'w')
            # write header to file
            f.write('%% Generated by tikz_export.py v %s \n' % (__version__))
            f.write(s)
            print "Code written to %s" % filepath
        finally:
            f.close()
        return
    else:
        success = copy_to_clipboard(s)
        if not success:
            print "Failed to copy code to the clipboard"
            print "Pywin32, xclip, cbcopy or pygtk required for clipboard support"
            Blender.Draw.PupMenu('ERROR: Failed to copy generated code to the clipboard')
# Start of script -----------------------------------------------------
# Ensure that at leas one object is selected
if len(Blender.Object.GetSelected()) == 0:
    # no objects selected. Print error message and quit
    Blender.Draw.PupMenu('ERROR: Please select at least one curve')
else:
    # Default output filename: the blend file's name with a .tex extension.
    fname = bsys.makename(ext=".tex")
    retval = draw_GUI()
    if retval and not CLIPBOARD_OUTPUT:
        Blender.Window.FileSelector(write_objects, "Export TikZ", fname)
    # NOTE(review): indentation was ambiguous in the source dump — as
    # reconstructed this always runs after the GUI, even alongside the
    # FileSelector callback above; confirm against the original script.
    write_objects(fname)
print "tikz_export ended ..."
|
jorisroovers/pymarkdownlint
|
pymarkdownlint/cli.py
|
import pymarkdownlint
from pymarkdownlint.filefinder import MarkdownFileFinder
from pymarkdownlint.lint import MarkdownLinter
from pymarkdownlint.config import LintConfig
import os
import click
DEFAULT_CONFIG_FILE = ".markdownlint"
def echo_files(files):
    """Print each file path on its own line and exit with status 0."""
    for file_path in files:
        click.echo(file_path)
    exit(0)
def get_lint_config(config_path=None):
    """ Tries loading the config from the given path. If no path is specified,
    the default config path is tried, and if that does not exist, the default
    config is returned. """
    # config path specified
    if config_path:
        config = LintConfig.load_from_file(config_path)
        click.echo("Using config from {0}".format(config_path))
    # default config path
    elif os.path.exists(DEFAULT_CONFIG_FILE):
        config = LintConfig.load_from_file(DEFAULT_CONFIG_FILE)
        click.echo("Using config from {0}".format(DEFAULT_CONFIG_FILE))
    # no config file
    else:
        config = LintConfig()
    return config
@click.command()
@click.option('--config', type=click.Path(exists=True),
              help="Config file location (default: {0}).".format(DEFAULT_CONFIG_FILE))
@click.option('--list-files', is_flag=True, help="List markdown files in given path and exit.")
@click.option('--ignore', default="", help="Ignore rules (comma-separated by id or name).")
@click.argument('path', type=click.Path(exists=True))
@click.version_option(version=pymarkdownlint.__version__)
def cli(list_files, config, ignore, path):
    """ Markdown lint tool, checks your markdown for styling issues """
    # NOTE: the docstring above is click's --help text; keep it user-facing.
    files = MarkdownFileFinder.find_files(path)
    if list_files:
        # echo_files exits the process after printing.
        echo_files(files)
    lint_config = get_lint_config(config)
    # Disable each rule id/name given in the comma-separated --ignore value.
    lint_config.apply_on_csv_string(ignore, lint_config.disable_rule)
    linter = MarkdownLinter(lint_config)
    error_count = linter.lint_files(files)
    # Exit status is the number of lint errors found (0 == clean).
    exit(error_count)
if __name__ == "__main__":
cli()
|
ducted/duct
|
duct/protocol/sflow/protocol/counters.py
|
"""
.. module:: counters
:synopsis: SFlow counter object interfaces
.. moduleauthor:: Colin Alston <colin@imcol.in>
"""
from construct import Struct, UBInt32, Array, Bytes
class InterfaceCounters(object):
    """Counters for network interfaces

    sFlow counter record format 1 (see getDecoder). Attributes are decoded
    in wire order from the XDR unpacker ``u``.
    """
    def __init__(self, u):
        self.if_index = u.unpack_uint()
        self.if_type = u.unpack_uint()
        self.if_speed = u.unpack_uhyper()
        self.if_mode = u.unpack_uint()
        self.if_status = u.unpack_uint()
        self.if_inOctets = u.unpack_uhyper()
        self.if_inPackets = u.unpack_uint()
        self.if_inMcast = u.unpack_uint()
        self.if_inBcast = u.unpack_uint()
        self.if_inDiscard = u.unpack_uint()
        self.if_inError = u.unpack_uint()
        self.if_unknown = u.unpack_uint()
        self.if_outOctets = u.unpack_uhyper()
        self.if_outPackets = u.unpack_uint()
        self.if_outMcast = u.unpack_uint()
        self.if_outBcast = u.unpack_uint()
        self.if_outDiscard = u.unpack_uint()
        self.if_outError = u.unpack_uint()
        self.if_promisc = u.unpack_uint()
class EthernetCounters(object):
    """Counters for ethernet frames

    sFlow counter record format 2 (see getDecoder). Field names follow the
    dot3Stats MIB; decoded in wire order from the XDR unpacker ``u``.
    """
    def __init__(self, u):
        self.dot3StatsAlignmentErrors = u.unpack_uint()
        self.dot3StatsFCSErrors = u.unpack_uint()
        self.dot3StatsSingleCollisionFrames = u.unpack_uint()
        self.dot3StatsMultipleCollisionFrames = u.unpack_uint()
        self.dot3StatsSQETestErrors = u.unpack_uint()
        self.dot3StatsDeferredTransmissions = u.unpack_uint()
        self.dot3StatsLateCollisions = u.unpack_uint()
        self.dot3StatsExcessiveCollisions = u.unpack_uint()
        self.dot3StatsInternalMacTransmitErrors = u.unpack_uint()
        self.dot3StatsCarrierSenseErrors = u.unpack_uint()
        self.dot3StatsFrameTooLongs = u.unpack_uint()
        self.dot3StatsInternalMacReceiveErrors = u.unpack_uint()
        self.dot3StatsSymbolErrors = u.unpack_uint()
class VLANCounters(object):
    """Per-VLAN traffic counters (sFlow counter record format 5)."""
    def __init__(self, u):
        # (attribute, decoder) pairs, in wire order.
        for attr, decode in (
                ('vlan_id', u.unpack_uint),
                ('octets', u.unpack_uhyper),
                ('ucastPkts', u.unpack_uint),
                ('multicastPkts', u.unpack_uint),
                ('broadcastPkts', u.unpack_uint),
                ('discards', u.unpack_uint)):
            setattr(self, attr, decode())
class TokenringCounters(object):
    """Counters for Token ring networks

    sFlow counter record format 3 (see getDecoder). Field names follow the
    dot5Stats MIB; decoded in wire order from the XDR unpacker ``u``.
    """
    def __init__(self, u):
        self.dot5StatsLineErrors = u.unpack_uint()
        self.dot5StatsBurstErrors = u.unpack_uint()
        self.dot5StatsACErrors = u.unpack_uint()
        self.dot5StatsAbortTransErrors = u.unpack_uint()
        self.dot5StatsInternalErrors = u.unpack_uint()
        self.dot5StatsLostFrameErrors = u.unpack_uint()
        self.dot5StatsReceiveCongestions = u.unpack_uint()
        self.dot5StatsFrameCopiedErrors = u.unpack_uint()
        self.dot5StatsTokenErrors = u.unpack_uint()
        self.dot5StatsSoftErrors = u.unpack_uint()
        self.dot5StatsHardErrors = u.unpack_uint()
        self.dot5StatsSignalLoss = u.unpack_uint()
        self.dot5StatsTransmitBeacons = u.unpack_uint()
        self.dot5StatsRecoverys = u.unpack_uint()
        self.dot5StatsLobeWires = u.unpack_uint()
        self.dot5StatsRemoves = u.unpack_uint()
        self.dot5StatsSingles = u.unpack_uint()
        self.dot5StatsFreqErrors = u.unpack_uint()
class VGCounters(object):
    """Counters for AnyLan frames

    sFlow counter record format 4 (see getDecoder).

    NOTE(review): the body is byte-identical to TokenringCounters (dot5Stats
    fields), which looks like a copy-paste — the sFlow 100BaseVG record
    normally uses dot12* fields. Renaming would break consumers of these
    attribute names, so it is left as-is; confirm against the spec.
    """
    def __init__(self, u):
        self.dot5StatsLineErrors = u.unpack_uint()
        self.dot5StatsBurstErrors = u.unpack_uint()
        self.dot5StatsACErrors = u.unpack_uint()
        self.dot5StatsAbortTransErrors = u.unpack_uint()
        self.dot5StatsInternalErrors = u.unpack_uint()
        self.dot5StatsLostFrameErrors = u.unpack_uint()
        self.dot5StatsReceiveCongestions = u.unpack_uint()
        self.dot5StatsFrameCopiedErrors = u.unpack_uint()
        self.dot5StatsTokenErrors = u.unpack_uint()
        self.dot5StatsSoftErrors = u.unpack_uint()
        self.dot5StatsHardErrors = u.unpack_uint()
        self.dot5StatsSignalLoss = u.unpack_uint()
        self.dot5StatsTransmitBeacons = u.unpack_uint()
        self.dot5StatsRecoverys = u.unpack_uint()
        self.dot5StatsLobeWires = u.unpack_uint()
        self.dot5StatsRemoves = u.unpack_uint()
        self.dot5StatsSingles = u.unpack_uint()
        self.dot5StatsFreqErrors = u.unpack_uint()
class HostCounters(object):
    """Counters for host endpoints

    sFlow counter record format 2000: host description.
    """
    # sFlow record format ID handled by this decoder.
    format = 2000
    def __init__(self, u):
        self.hostname = u.unpack_string()
        self.uuid = u.unpack_fopaque(16)
        self.machine_type = u.unpack_uint()
        # NOTE(review): os_name is decoded as a uint while os_release is a
        # string — in sFlow host_descr os_name is an enum, so this looks
        # intentional, but confirm against the spec.
        self.os_name = u.unpack_uint()
        self.os_release = u.unpack_string()
class HostAdapters(object):
    """Counters for HBAs

    sFlow counter record format 2001. Unlike its siblings, this record is
    parsed with a ``construct`` Struct over the unpacker's remaining raw
    buffer: a count followed by (index, 6-byte MAC) pairs.
    """
    # sFlow record format ID handled by this decoder.
    format = 2001
    def __init__(self, u):
        self.adapters = Struct("adapters",
                               UBInt32("count"),
                               Array(lambda c: c.count,
                                     Struct("adapter",
                                            UBInt32("index"),
                                            Bytes("MAC", 6)))
                               ).parse(u.get_buffer())
class HostParent(object):
    """Identity of the containing entity (sFlow counter record format 2002)."""
    # sFlow record format ID handled by this decoder.
    format = 2002
    def __init__(self, u):
        # Wire order: container_type then container_index, both unsigned ints.
        for attr in ('container_type', 'container_index'):
            setattr(self, attr, u.unpack_uint())
class HostCPUCounters(object):
    """Counters for host CPU stats

    sFlow counter record format 2003. Load averages are floats; everything
    else is an unsigned int, decoded in wire order from ``u``.
    """
    # sFlow record format ID handled by this decoder.
    format = 2003
    def __init__(self, u):
        self.load_one = u.unpack_float()
        self.load_five = u.unpack_float()
        self.load_fifteen = u.unpack_float()
        self.proc_run = u.unpack_uint()
        self.proc_total = u.unpack_uint()
        self.cpu_num = u.unpack_uint()
        self.cpu_speed = u.unpack_uint()
        self.uptime = u.unpack_uint()
        self.cpu_user = u.unpack_uint()
        self.cpu_nice = u.unpack_uint()
        self.cpu_system = u.unpack_uint()
        self.cpu_idle = u.unpack_uint()
        self.cpu_wio = u.unpack_uint()
        self.cpu_intr = u.unpack_uint()
        self.cpu_sintr = u.unpack_uint()
        self.interrupts = u.unpack_uint()
        self.contexts = u.unpack_uint()
class HostMemoryCounters(object):
    """Counters for host memory

    sFlow counter record format 2004. Sizes are unsigned hypers, paging/swap
    event counts unsigned ints; decoded in wire order from ``u``.
    """
    # sFlow record format ID handled by this decoder.
    format = 2004
    def __init__(self, u):
        self.mem_total = u.unpack_uhyper()
        self.mem_free = u.unpack_uhyper()
        self.mem_shared = u.unpack_uhyper()
        self.mem_buffers = u.unpack_uhyper()
        self.mem_cached = u.unpack_uhyper()
        self.swap_total = u.unpack_uhyper()
        self.swap_free = u.unpack_uhyper()
        self.page_in = u.unpack_uint()
        self.page_out = u.unpack_uint()
        self.swap_in = u.unpack_uint()
        self.swap_out = u.unpack_uint()
class DiskIOCounters(object):
    """Counters for disk IO

    sFlow counter record format 2005; decoded in wire order from ``u``.
    """
    # sFlow record format ID handled by this decoder.
    format = 2005
    def __init__(self, u):
        self.disk_total = u.unpack_uhyper()
        self.disk_free = u.unpack_uhyper()
        self.part_max_used = u.unpack_uint()
        self.reads = u.unpack_uint()
        self.bytes_read = u.unpack_uhyper()
        self.read_time = u.unpack_uint()
        self.writes = u.unpack_uint()
        self.bytes_written = u.unpack_uhyper()
        self.write_time = u.unpack_uint()
class NetIOCounters(object):
    """Counters for network interface IO

    sFlow counter record format 2006; decoded in wire order from ``u``.
    """
    # sFlow record format ID handled by this decoder.
    format = 2006
    def __init__(self, u):
        self.bytes_in = u.unpack_uhyper()
        self.pkts_in = u.unpack_uint()
        self.errs_in = u.unpack_uint()
        self.drops_in = u.unpack_uint()
        self.bytes_out = u.unpack_uhyper()
        self.packets_out = u.unpack_uint()
        self.errs_out = u.unpack_uint()
        self.drops_out = u.unpack_uint()
class SocketIPv4Counters(object):
    """Counters for IPv4 sockets

    sFlow counter record format 2100. Addresses are raw 4-byte strings.
    """
    # sFlow record format ID handled by this decoder.
    format = 2100
    def __init__(self, u):
        self.protocol = u.unpack_uint()
        self.local_ip = u.unpack_fstring(4)
        self.remote_ip = u.unpack_fstring(4)
        self.local_port = u.unpack_uint()
        self.remote_port = u.unpack_uint()
class SocketIPv6Counters(object):
    """Counters for IPv6 sockets

    sFlow counter record format 2101. Addresses are raw 16-byte strings.
    """
    # sFlow record format ID handled by this decoder.
    format = 2101
    def __init__(self, u):
        self.protocol = u.unpack_uint()
        self.local_ip = u.unpack_fstring(16)
        self.remote_ip = u.unpack_fstring(16)
        self.local_port = u.unpack_uint()
        self.remote_port = u.unpack_uint()
class VirtMemoryCounters(object):
    """Virtual-machine memory counters (sFlow counter record format 2102)."""
    # sFlow record format ID handled by this decoder.
    format = 2102
    def __init__(self, u):
        # Tuple RHS evaluates left-to-right, preserving wire order.
        self.memory, self.maxMemory = u.unpack_uhyper(), u.unpack_uhyper()
class VirtDiskIOCounters(object):
    """Counters for virtual disk IO

    sFlow counter record format 2103 (virt_disk_io); decoded in wire order
    from the XDR unpacker ``u``.
    """
    # sFlow record format ID handled by this decoder.
    format = 2103
    def __init__(self, u):
        self.capacity = u.unpack_uhyper()
        self.allocation = u.unpack_uhyper()
        self.available = u.unpack_uhyper()
        self.rd_req = u.unpack_uint()
        # BUGFIX: this field was `u.unpack_unsigend()` — a method-name typo
        # that raised AttributeError on every decode. Per the sFlow v5
        # virt_disk_io structure the field between rd_req and wr_req is
        # rd_bytes, an unsigned hyper.
        self.rd_bytes = u.unpack_uhyper()
        # Keep the old attribute name for backward compatibility.
        self.hyper = self.rd_bytes
        self.wr_req = u.unpack_uint()
        self.wr_bytes = u.unpack_uhyper()
        self.errs = u.unpack_uint()
class VirtNetIOCounters(object):
    """Counters for virtual network adapters

    sFlow counter record format 2104; decoded in wire order from ``u``.
    """
    # sFlow record format ID handled by this decoder.
    format = 2104
    def __init__(self, u):
        self.rx_bytes = u.unpack_uhyper()
        self.rx_packets = u.unpack_uint()
        self.rx_errs = u.unpack_uint()
        self.rx_drop = u.unpack_uint()
        self.tx_bytes = u.unpack_uhyper()
        self.tx_packets = u.unpack_uint()
        self.tx_errs = u.unpack_uint()
        self.tx_drop = u.unpack_uint()
def getDecoder(fmt):
    """Retrieve the decoder associated with a frame format ID

    Returns the decoder class for *fmt*, or None for unknown formats.
    """
    decoders = {
        1: InterfaceCounters,
        2: EthernetCounters,
        3: TokenringCounters,
        4: VGCounters,
        5: VLANCounters,
        2000: HostCounters,
        2001: HostAdapters,
        2002: HostParent,
        2003: HostCPUCounters,
        2004: HostMemoryCounters,
        2005: DiskIOCounters,
        2006: NetIOCounters,
        # BUGFIX: format 2100 was missing from this table even though
        # SocketIPv4Counters is defined above, so IPv4 socket counter
        # records were silently dropped.
        2100: SocketIPv4Counters,
        2101: SocketIPv6Counters,
        2102: VirtMemoryCounters,
        2103: VirtDiskIOCounters,
        2104: VirtNetIOCounters,
    }
    return decoders.get(fmt, None)
|
cthit/CodeIT
|
src/level/Level.py
|
import configparser
import importlib.util
import random
from pdb import set_trace
from warnings import warn
import numpy as np
import pygame
from PIL import Image
import os
from behaviours.Collide import Collide
from src.utils.CodeItWarning import CodeItWarning
from tiles.base.Tile import Tile
level_paths = {}
level_configs = {}
level_backgrounds = {}
entity_map = {}
image_file_formats = [".png", ".jpg", ".jpeg", ".bmp"]
def constructor_factory(constructor, name):
    """Bind *name* into a two-argument (x, y) factory for *constructor*."""
    def make(x, y):
        return constructor(x, y, name)
    return make
def load_entity_map():
    """Populate the module-level entity_map with name -> (x, y) constructors.

    Entities: each ../entities/<dir> must contain a .py file whose module
    defines a class named like the file; the module is loaded dynamically.
    Tiles: each ../tiles/<dir> containing an image gets a generic Tile
    constructor named after the image file.
    """
    for path in [f.path for f in os.scandir("../entities") if f.is_dir()]:
        # First .py file in the entity directory defines the entity class.
        path = [x for x in os.scandir(path) if os.path.splitext(x)[1] == ".py"][0]
        class_name = os.path.splitext(os.path.basename(path))[0]
        if ' ' in class_name:
            raise ValueError("Entities cannot have spaces in their names, '" + class_name + "'")
        # Import the entity module from its file path and pull out the class.
        spec = importlib.util.spec_from_file_location("dynamic_load.entities." + class_name, path)
        foo = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(foo)
        class_constructor = getattr(foo, class_name)
        entity_map[class_name] = constructor_factory(class_constructor, class_name)
    for path in [f.path for f in os.scandir("../tiles") if f.is_dir()]:
        paths = [x for x in os.scandir(path) if os.path.splitext(x)[1] in image_file_formats]
        if len(paths) <= 0:
            # Tile directory without an image: nothing to register.
            continue
        path = paths[0]
        class_name = os.path.splitext(os.path.basename(path))[0]
        if ' ' in class_name:
            raise ValueError("Tiles cannot have spaces in their names, '" + class_name + "'")
        entity_map[class_name] = constructor_factory(Tile, class_name)
load_entity_map()
def load_levels():
    """Scan ../levels and fill level_paths, level_configs and level_backgrounds.

    Each level directory must contain a config.ini with [General] Name;
    a file whose stem contains "background" is loaded as the background image.
    """
    for path in [f.path for f in os.scandir("../levels") if f.is_dir()]:
        config = configparser.ConfigParser()
        config.read(path + "/config.ini")
        level_name = config["General"]["Name"]
        level_paths[level_name] = path
        level_configs[level_name] = config
        paths = [x for x in os.scandir(path) if "background" in os.path.splitext(x)[0]]
        if len(paths) <= 0:
            # Level without a background image; Level.load() falls back to
            # the Default level's background.
            continue
        path = paths[0]
        level_backgrounds[level_name] = pygame.image.load(os.path.relpath(path, os.getcwd()))
def get_level_by_index(index):
    """Return a Level whose config Index equals *index*, or None.

    When several levels share the index, one of them is picked at random.
    """
    candidates = [
        name for name, cfg in level_configs.items()
        if int(cfg["General"]["Index"]) == index
    ]
    if not candidates:
        return None
    # random.choice on a single-element list is that element, so the
    # one-candidate case needs no special branch.
    return Level(random.choice(candidates))
load_levels()
def hex_to_rgb(h):
    """Convert a 6-digit hex color string 'RRGGBB' to an (r, g, b) tuple."""
    red = int(h[0:2], 16)
    green = int(h[2:4], 16)
    blue = int(h[4:6], 16)
    return (red, green, blue)
def rgb_to_hex(rgb: tuple) -> str:
    """Convert an (r, g, b) tuple to an upper-case 'RRGGBB' hex string."""
    r, g, b = rgb
    return "%02X%02X%02X" % (r, g, b)
def load_config(path):
    """Read the INI file at *path* and return the resulting ConfigParser.

    Missing files are silently ignored (ConfigParser.read semantics),
    yielding an empty parser.
    """
    parser = configparser.ConfigParser()
    parser.read(path)
    return parser
default_color_map = load_config("../levels/default/color-map.ini")
# returns a dictionary that maps rgb tuples to entity constructors
# @param entity_map: a dictionary that maps entity names to entity constructors.
def get_color_map(level):
    """Build {rgb_tuple: (entity_name, constructor)} for *level*.

    The level's own color-map.ini is overlaid on the default one; keys of
    the [Colors] section are 'RRGGBB' hex strings mapped to entity names
    that must exist in the module-level entity_map.
    """
    color_map = {}
    config = combine_configs(load_config(level.path + "/color-map.ini"), default_color_map)
    for key in config["Colors"]:
        color_map[hex_to_rgb(key)] = (config["Colors"][key], entity_map[config["Colors"][key]])
    return color_map
def combine_configs(c1, c2):
    """Merge two config mappings section-wise into plain dicts; c1 wins.

    Every section present in either input appears in the result; within a
    section, keys from c1 override keys from c2.
    """
    merged = {}
    for section in set(c1.keys()) | set(c2.keys()):
        section_values = {}
        if section in c2:
            section_values.update(c2[section])
        if section in c1:
            # c1 is applied last so its values take precedence.
            section_values.update(c1[section])
        merged[section] = section_values
    return merged
def load_map_image(level):
    """Load the level's map.bmp as a pixel array.

    Returns (pixels, (height, width)); pixels is an ndarray indexed
    [row, col] with the color channels in the last axis.
    """
    arr3d = np.array(Image.open(level.path + "/map.bmp"))
    return arr3d, arr3d.shape[:2]
def load_entities(color_map, map_image, image_shape):
    """Instantiate a tile/entity for every mapped pixel of the map image.

    Returns (entities, tiles, entity_lookup) where entity_lookup maps an
    entity name to the list of its instances. Unmapped pixel colors emit a
    CodeItWarning (one per pixel).
    """
    entities = []
    entity_lookup = {}
    tiles = []
    for ix, iy in np.ndindex(image_shape):
        rgb = tuple(map_image[ix, iy])
        if rgb in color_map:
            # Note the swap: ix is the pixel row (y), iy the column (x),
            # and constructors take (x, y).
            entity = color_map[rgb][1](iy, ix)
            entity_name = color_map[rgb][0]
            if isinstance(entity, Tile):
                tiles.append(entity)
            else:
                # Map-placed entities come back after a level reset.
                entity.remain_on_reset = True
                if entity_name in entity_lookup:
                    entity_lookup[entity_name].append(entity)
                else:
                    entity_lookup[entity_name] = [entity]
                entities.append(entity)
        else:
            warn("CodeIT:: No Tile or entity mapped to " + rgb_to_hex(rgb), category=CodeItWarning, stacklevel=0)
    return entities, tiles, entity_lookup
class Level:
    """A playable level: tiles, entities and config loaded from ../levels/<name>.

    The special "Default" level only supplies fallback config/background and
    must never be instantiated directly.
    """
    def __init__(self, name):
        if name == "Default":
            raise ValueError("Do not instantiate the default level")
        self.path = level_paths[name]
        # Level config overlaid on the Default level's config (level wins).
        self.config = combine_configs(level_configs[name], level_configs["Default"])
        self.name = name
        self.entities = list()        # live non-tile entities
        self.tiles = list()           # static tiles
        self.color_map = {}           # rgb tuple -> (entity name, constructor)
        self.map_image = None         # map.bmp pixels (set by load())
        self.map_shape = (0, 0)       # (height, width) of the map image
        self.background = None
        self.entity_lookup_map = {}   # entity name -> list of instances
        self.graveyard = []           # killed entities to revive on reset
    def load(self):
        """Populate entities, tiles and background from the level's files."""
        try:
            # Pure GUI levels have no map to load.
            if self.config["General"]["type"] == "Pure GUI":
                return
        except KeyError:
            pass
        self.color_map = get_color_map(self)
        self.map_image, self.map_shape = load_map_image(self)
        self.entities, self.tiles, self.entity_lookup_map = load_entities(self.color_map, self.map_image,
                                                                          self.map_shape)
        # NOTE(review): unit of "timelimit" (seconds? frames?) is not
        # visible here — confirm against the game loop.
        self.level_time = int(self.config["General"]["timelimit"])
        if self.name in level_backgrounds:
            self.background = level_backgrounds[self.name]
        else:
            self.background = level_backgrounds["Default"]
        return
    def kill_entity(self, entity):
        """Remove *entity* from play; keep it for revival if flagged."""
        if entity in self.entities:
            self.entities.remove(entity)
            entity.get_behaviour(Collide).clear()
            entity.is_dead = True
            if entity.remain_on_reset:
                self.graveyard.append(entity)
    def revive_entities(self):
        """Bring every graveyard entity back into play (level reset)."""
        self.entities.extend(self.graveyard)
        for entity in self.graveyard:
            entity.is_dead = False
        self.graveyard.clear()
    def get_entities(self, entity_name):
        """Return the list of instances registered under *entity_name*."""
        if entity_name in self.entity_lookup_map:
            return self.entity_lookup_map[entity_name]
        raise Exception("Invalid entity!, " + entity_name)
    def get_y(self, number, unit):
        """Convert *number* to a y coordinate; 'percent' is relative to map size."""
        # NOTE(review): uses map_shape[1] while get_x uses map_shape[0];
        # map_shape is (height, width), so the axes look swapped — confirm.
        if unit == "percent":
            return self.map_shape[1] * number / 100
        return number
    def get_x(self, number, unit):
        """Convert *number* to an x coordinate; 'percent' is relative to map size."""
        if unit == "percent":
            return self.map_shape[0] * number / 100
        return number
    def spawn_entity(self, entity_class, x, y):
        """Create an *entity_class* instance at (x, y), register and return it."""
        entity_name = entity_class.__name__
        entity = entity_class(x, y, entity_name)
        self.entities.append(entity)
        if entity_name in self.entity_lookup_map:
            self.entity_lookup_map[entity_name].append(entity)
        else:
            self.entity_lookup_map[entity_name] = [entity]
        return entity
    def clear(self):
        """Release all tiles and entities held by this level."""
        for tile in self.tiles:
            tile.clear()
        self.tiles.clear()
        for entity in self.entities:
            entity.clear()
        self.entities.clear()
        self.entity_lookup_map.clear()
    def __del__(self):
        self.clear()
|
letops/django-sendgrid-parse
|
django_sendgrid_parse/migrations/0002_auto_20160729_1816.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.8 on 2016-07-29 18:16
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Rename Email.to -> Email.to_mailbox and add Email.from_mailbox.

    Auto-generated; the empty-string default for from_mailbox only
    backfills existing rows (preserve_default=False).
    """
    dependencies = [
        ('django_sendgrid_parse', '0001_initial'),
    ]
    operations = [
        migrations.RenameField(
            model_name='email',
            old_name='to',
            new_name='to_mailbox',
        ),
        migrations.AddField(
            model_name='email',
            name='from_mailbox',
            field=models.TextField(default=''),
            preserve_default=False,
        ),
    ]
|
pmcnett/pmcalendar
|
pmcalendar/ui.py
|
import datetime
import calendar
import dabo
dabo.ui.loadUI("wx")
from dabo.ui import dForm, dPanel, dSizer, dGridSizer, dButton, dEditBox, \
dTextBox, dControlMixin, callAfterInterval, dKeys, \
dLabel, dHyperLink
from dabo.lib.dates import goMonth, goDate
import biz
__all__ = ["FrmCalendar"]
class BaseMixin(dControlMixin):
    """Base behavior for all the relevant controls.

    Forwards focus events (slightly deferred) to the parent day panel so it
    can track whether any of its children has focus.
    """
    def initProperties(self):
        self.BorderStyle = None
    def onGotFocus(self, evt):
        # Defer so that rapid focus bounces between sibling controls settle.
        callAfterInterval(10, self.Parent.gotFocus)
    def onLostFocus(self, evt):
        callAfterInterval(10, self.Parent.lostFocus)
class PnlNavigation(dPanel):
    """Month navigation bar: '<' and '>' links around a month/year label."""
    def afterInit(self):
        hs = self.Sizer = dSizer("h")
        # Hyperlinks styled to look like plain black buttons.
        but_props = {"FontBold": True, "ShowInBrowser": False,
                     "OnHit": self.onHit_but, "VisitedUnderline": False,
                     "LinkUnderline": False, "VisitedColor": "black",
                     "HoverUnderline": False, "LinkColor": "black"}
        left_but = dHyperLink(self, Name="butLeft", Caption=" < ", **but_props)
        right_but = dHyperLink(self, Name="butRight", Caption=" > ", **but_props)
        lbl = dLabel(self, Name="lblMonthYear", FontBold=True)
        hs.appendSpacer(20)
        hs.append(left_but)
        hs.appendSpacer(10)
        hs.append(lbl, alignment="middle")
        hs.appendSpacer(10)
        hs.append(right_but)
        hs.appendSpacer(20)
    def setCaption(self, val):
        """Update the month/year label and re-layout the form."""
        self.lblMonthYear.Caption = val
        self.layout()
        callAfterInterval(100, self.Parent.Sizer.layout)
    def onHit_but(self, evt):
        # Move the displayed month one back/forward depending on which
        # arrow was clicked.
        pnlLayout = self.Form.pnlLayout
        interval = {"butLeft": -1, "butRight": 1,}[evt.EventObject.Name]
        new_date = goMonth(datetime.date(pnlLayout.Year, pnlLayout.Month, 1),
                           interval)
        pnlLayout.Year = new_date.year
        pnlLayout.Month = new_date.month
        pnlLayout.setFocus()
class DummyTextBox(dTextBox, BaseMixin):
    """Invisible textbox simply to receive and respond to user actions."""
    def initProperties(self):
        ## Dabo doesn't yet offer an interface to set wx.TE_PROCESS_ENTER.
        import wx
        # Zero-sized so the control is never visible, only focusable.
        self.Size = (0, 0)
        self._addWindowStyleFlag(wx.TE_PROCESS_ENTER)
    def onKeyDown(self, evt):
        # Delegate keyboard navigation to the owning day panel.
        self.Parent.processDayKeyDown(evt)
class EditMixin(BaseMixin):
    """Base behavior for the edit boxes."""
    def initProperties(self):
        super(EditMixin, self).initProperties()
        # Tiny initial size; the day panel's sizer stretches the control.
        self.Height = 5
        self.Width = 5
        self.DynamicBackColor = self.getBackColor
    def onLostFocus(self, evt):
        super(EditMixin, self).onLostFocus(evt)
        self.save()
    def save(self):
        """Subclasses must override if they want entered data to be saved."""
        pass
    def getBackColor(self):
        """Color depends on whether this day is in the current month."""
        try:
            return (
                "white" if self.Parent.Date.month == self.Parent.Parent.Month
                else "lightgrey")
        # StandardError is Python 2 only; catches the case where the parent
        # day has no Date set yet.
        except StandardError:
            return "darkgrey"
class Day(dLabel):
    """The day number in the upper-left of each day panel."""
    def initProperties(self):
        self.Width = 30
        self.Height = 23
        self.Name = "day"
        self.ForeColor = "blue"
    def onMouseLeftClick(self, evt):
        # Single click selects the day panel itself.
        self.Parent.setFocus()
    def onMouseLeftDoubleClick(self, evt):
        # Double click jumps straight into the diary editor for the day.
        self.Parent.diary.setFocus()
class Static(dTextBox, EditMixin):
    """View/edit/save the diary entries that repeat on this day every year."""
    def initProperties(self):
        self.FontItalic = True
        self.Name = "static"
        super(Static, self).initProperties()
    def save(self):
        """Persist the yearly-repeating entry, keyed on month+day."""
        cal = self.Parent.Parent
        day = self.Parent
        bizStatic = cal.bizStatic
        # Key is month and day concatenated, e.g. "225" for Feb 25.
        monthday = "%s%s" % (day.Date.month, day.Date.day)
        if not bizStatic.locate(monthday, "monthday"):
            bizStatic.new()
            bizStatic.Record.monthday = monthday
        bizStatic.Record.diary = self.Value
        bizStatic.save()
class DiaryView(dEditBox, EditMixin):
    """The view of the day's diary (no unneeded scrollbars)."""
    def initProperties(self):
        ## dabo doesn't yet allow overriding the display of scrollbars.
        import wx
        self.Name = "diary"
        self._addWindowStyleFlag(wx.TE_NO_VSCROLL)
        super(DiaryView, self).initProperties()
    def onGotFocus(self, evt):
        """Switch to the edit control (with scrollbars)."""
        edit = self.Parent.diaryedit
        edit.Value = self.Value
        edit.Visible = True
        self.Visible = False
        self.Parent.layout()
        edit.setFocus()
    def onLostFocus(self, evt):
        ## overridden to avoid an unnecessary save(); DiaryEdit does the saving
        pass
class DiaryEdit(dEditBox, EditMixin):
    """The editable view of the day's diary (with scrollbars)."""
    def initProperties(self):
        self.Name = "diaryedit"
        # Hidden until DiaryView hands the focus over.
        self.Visible = False
        super(DiaryEdit, self).initProperties()
    def save(self):
        """Persist this day's diary text, keyed on the date."""
        cal = self.Parent.Parent
        day = self.Parent
        bizDaily = cal.bizDaily
        date = day.Date
        if not bizDaily.locate(date, "date"):
            bizDaily.new()
            bizDaily.Record.date = date
        bizDaily.Record.diary = self.Value
        bizDaily.save()
    def onLostFocus(self, evt):
        # Save (via EditMixin), then swap the read-only view back in.
        super(DiaryEdit, self).onLostFocus(evt)
        view = self.Parent.diary
        view.Value = self.Value
        view.Visible = True
        self.Visible = False
        self.Value = ""
        self.Parent.layout()
class PnlDay(dPanel):
    """Panel of a calendar day, including the day number and edit controls."""
    def initProperties(self):
        self.BorderStyle = "Raised"
        # Recomputed on every update(); grey for days outside the shown month.
        self.DynamicBackColor = self.getBackColor
        # Tracks whether this panel currently holds the logical focus.
        self._hadFocus = False
    def afterInit(self):
        # Invisible textbox that actually receives the keystrokes for this day.
        self.dummy = DummyTextBox(self)
        vs = self.Sizer = dSizer("v")
        hs = dSizer("h")
        hs.append(Day(self), "expand")
        hs.append1x(Static(self))
        vs.append(hs, "expand")
        vs.append1x(DiaryView(self))
        vs.append1x(DiaryEdit(self))
        parent = self.Parent
        diary = self.diary
        # Keyboard shortcuts: tuple-of-keycodes -> (callable, args); dispatched
        # in processDayKeyDown().
        self.key_actions = {
            (dKeys.key_Enter, dKeys.key_Numpad_enter): (diary.setFocus, ()),
            (ord("t"), ord("T")): (parent.setFocusToToday, ()),
            (ord("+"), ord("=")): (self.setFocusToInterval, ("days", 1)),
            (ord("-"),): (self.setFocusToInterval, ("days", -1)),
            (ord("["),): (self.setFocusToInterval, ("months", -1)),
            (ord("]"),): (self.setFocusToInterval, ("months", 1))}
    def gotFocus(self):
        """Bold/darken the day number to show this panel has the focus."""
        day = self.day
        dummy = self.dummy
        if not self._hadFocus:
            day.FontBold = True
            day.ForeColor = "darkblue"
            self._hadFocus = True
            day.refresh()
    def lostFocus(self):
        # Only un-highlight when the focus really left this panel's children.
        if self.Form.ActiveControl not in self.Children:
            self._hadFocus = False
            day = self.day
            day.FontBold = False
            day.ForeColor = "blue"
            self.refresh()
    def setFocus(self):
        # Focus the hidden textbox so key events route to processDayKeyDown().
        self.dummy.setFocus()
    def setFocusToInterval(self, mode, interval):
        """Go forward or backward by months or days."""
        assert mode in ("days", "months")
        func = {"days": goDate, "months": goMonth}[mode]
        date = self.Date
        self.Parent.setFocusToDate(func(date, interval))
    def processDayKeyDown(self, evt):
        """User is navigating the calendar; respond appropriately."""
        evtData = evt.EventData
        kc = evtData["keyCode"]
        ctrlDown = evtData["controlDown"]
        layout = self.Form.CalendarLayout
        # First, the explicit shortcut table built in afterInit().
        for keys, func_args in self.key_actions.items():
            if kc in keys:
                func, args = func_args
                func(*args)
                evt.stop()
                return
        if kc not in [dKeys.key_Up, dKeys.key_Down,
                dKeys.key_Left, dKeys.key_Right]:
            return
        evt.stop()
        if not ctrlDown:
            # move by day, wrapping around in the calendar
            x,y = self.Pos
            if kc == dKeys.key_Up and layout in ("month",):
                y -= 1
                if y < 0:
                    y = 5
            elif kc == dKeys.key_Down and layout in ("month",):
                y += 1
                if y > 5:
                    y = 0
            elif kc == dKeys.key_Left:
                x -= 1
                if x < 0:
                    x = 6
            elif kc == dKeys.key_Right:
                x += 1
                if x > 6:
                    x = 0
            new_ctrl = getattr(self.Parent, "day_%s_%s" % (x,y))
            new_ctrl.setFocus()
        else:
            # Ctrl+arrows page by month (left/right) or by year (up/down).
            year, month = self.Parent.Year, self.Parent.Month
            current_date = datetime.date(year, month, 1)
            if kc == dKeys.key_Left:
                new_date = goMonth(current_date, -1)
            elif kc == dKeys.key_Right:
                new_date = goMonth(current_date, 1)
            elif kc == dKeys.key_Up:
                new_date = goMonth(current_date, -12)
            elif kc == dKeys.key_Down:
                new_date = goMonth(current_date, 12)
            self.Parent.Year = new_date.year
            self.Parent.Month = new_date.month
    def getBackColor(self):
        # White for days in the displayed month, light grey otherwise; dark
        # grey before Date/Month exist (StandardError is Python 2 only).
        try:
            return ("white" if self.Date.month == self.Parent.Month
                    else "lightgrey")
        except StandardError:
            return "darkgrey"
    def _getPos(self):
        return self._pos
    def _setPos(self, val):
        self._pos = val
        # The Name encodes the grid position, e.g. "day_3_2".
        self.Name = "day_%s_%s" % val
    def _getDate(self):
        return self._date
    def _setDate(self, val):
        self._date = val
        self.day.Caption = str(val.day)
    # (x, y) grid coordinate of this panel within its layout.
    Pos = property(_getPos, _setPos)
    # The datetime.date this panel displays.
    Date = property(_getDate, _setDate)
class PnlLayout(dPanel):
    """Superclass to handle common elements for views (day, month, week)."""
    # Number of week rows in the grid; subclasses override (6=month, 1=week).
    _week_range = None
    def afterInit(self):
        con = self.Form.Connection
        gs = self.Sizer = dGridSizer(MaxCols=7)
        self.bizStatic = biz.BizStatic(con)
        self.bizDaily = biz.BizDaily(con)
        # Three-letter weekday abbreviations across the top row.
        header = calendar.weekheader(3).split()
        for x in header:
            gs.append(dLabel(self, Caption=x), alignment="center")
        for y in range(self._week_range):
            for x in range(7):
                gs.append(PnlDay(self, Pos=(x,y)), "expand")
                gs.setColExpand(True, x)
            gs.setRowExpand(True, y+1)
        self.setFocusToToday()
    def afterDateChanged(self):
        # Coalesced callback fired after Year and/or Month were set.
        self.setFormCaption()
        self.setDays()
    def setFormCaption(self):
        """Show e.g. "February 2012" in the form caption."""
        current_date = datetime.date(self.Year, self.Month, 1)
        # calendar.month_name.format is the strftime format string ("%B") of
        # the localized month-name list -- undocumented CPython detail.
        self.Form.setCaption("%s %s" % (
                current_date.strftime(calendar.month_name.format), self.Year))
    def setDays(self):
        """Fill every day panel from the month matrix and the two bizobjs."""
        mv = biz.getMonthMatrix(self.Year, self.Month)
        bizStatic = self.bizStatic
        bizStatic.requery()
        bizDaily = self.bizDaily
        # Only requery the dates actually visible in the grid.
        bizDaily.requery_for_dates(mv[0][0], mv[-1][-1])
        self.date_obj_map = {}
        for y in range(self._week_range):
            for x in range(7):
                o = getattr(self, "day_%s_%s" % (x,y))
                o.Date = mv[y][x]
                if bizStatic.locate("%s%s" % (
                        o.Date.month, o.Date.day), "monthday"):
                    o.static.Value = bizStatic.Record.diary
                else:
                    o.static.Value = ""
                if bizDaily.locate(o.Date, "date"):
                    o.diary.Value = bizDaily.Record.diary
                else:
                    o.diary.Value = ""
                self.date_obj_map[o.Date] = o
        self.update()
    def setFocusToToday(self):
        """Requerying the calendar if necessary, place cursor on today."""
        self.setFocusToDate(datetime.date.today())
    def setFocusToDate(self, date):
        """Requerying the calendar if necessary, place cursor on date."""
        try:
            self.date_obj_map[date].setFocus()
        except (KeyError, AttributeError):
            # Date not on screen (or map not built yet): change month and retry
            # once the deferred afterDateChanged() has repopulated the panels.
            self.Year = date.year
            self.Month = date.month
            callAfterInterval(75, self.setFocusToDate, date)
    def _getMonth(self):
        return self._month
    def _setMonth(self, val):
        self._month = val
        # Defer so setting Year and Month together triggers only one refresh.
        callAfterInterval(50, self.afterDateChanged)
    def _getYear(self):
        return self._year
    def _setYear(self, val):
        self._year = val
        callAfterInterval(50, self.afterDateChanged)
    Month = property(_getMonth, _setMonth)
    Year = property(_getYear, _setYear)
class PnlMonth(PnlLayout):
    """Standard 7x6 month layout."""
    # Six week rows are enough to display any month.
    _week_range = 6
class PnlWeek(PnlLayout):
    """7x1 week layout (INCOMPLETE)."""
    # A single week row.
    _week_range = 1
class FrmCalendar(dForm):
    """The main containing form of the calendar."""
    def afterInit(self):
        self._appendCaption = ""
        dcon = self.Connection
        if dcon is None:
            # use in-memory test sqlite database
            dcon = self.Connection = dabo.db.connect(":memory:")
            con = dcon._connection
            # Close the schema file promptly instead of leaking the handle.
            with open("./create_tables.sql") as schema:
                con.executescript(schema.read())
            self._appendCaption = "Temporary Database"
        # Cache of layout panels keyed by their class, built lazily.
        self._instantiatedLayouts = {}
        vs = self.Sizer = dSizer("v")
        vs.appendSpacer(5)
        vs.append(PnlNavigation(self, Name="pnlNavigation"), alignment="center")
        vs.appendSpacer(5)
        self.updateLayout()
    def updateLayout(self):
        """Draw the calendar on screen depending on self.CalendarLayout."""
        pnls = self._instantiatedLayouts
        PnlClass = {"month": PnlMonth, "week": PnlWeek}[self.CalendarLayout]
        vs = self.Sizer
        for pnl in pnls.values():
            pnl.Visible = False
        # BUGFIX: the original used pnls.setdefault(PnlClass, PnlClass(self)),
        # which constructed a brand-new orphaned panel on *every* call even
        # when a cached one already existed. Only build when missing.
        if PnlClass not in pnls:
            pnls[PnlClass] = PnlClass(self)
        pnl = self.pnlLayout = pnls[PnlClass]
        # BUGFIX: a cached panel was hidden by the loop above and never shown
        # again; re-show the selected layout explicitly.
        pnl.Visible = True
        if pnl not in vs.ChildWindows:
            vs.append1x(pnl)
        self.layout()
    def setCaption(self, val):
        """Set the form caption (plus any extra tag such as the temporary
        database notice) and forward the text to the navigation bar."""
        appendCaption = self._appendCaption
        if appendCaption:
            appendCaption = "[%s]" % appendCaption
        self.Caption = "%s %s" % (val, appendCaption)
        self.pnlNavigation.setCaption(val)
    def _getConnection(self):
        return getattr(self, "_connection", None)
    def _setConnection(self, val):
        self._connection = val
    def _getCalendarLayout(self):
        return getattr(self, "_calendar_layout", "month")
    def _setCalendarLayout(self, val):
        # Validate before storing so an invalid value can never stick.
        assert val in ("month", "week")
        self._calendar_layout = val
        # BUGFIX: the original passed self.updateLayout() -- which ran the
        # method immediately and handed callAfterInterval a None "callback".
        # Pass the bound method instead so the relayout is properly deferred.
        dabo.ui.callAfterInterval(10, self.updateLayout)
    CalendarLayout = property(_getCalendarLayout, _setCalendarLayout, None,
            """Either "month" or "week".""")
    Connection = property(_getConnection, _setConnection, None,
            "Dabo dConnection instance.")
if __name__ == "__main__":
    # If you want Sunday to be the first weekday, you need code like
    # the following commented lines in your application prior to
    # importing pmcalendar:
    #import calendar
    #calendar.setfirstweekday(6)
    # Launch the dabo application with the calendar form as the main window.
    dabo.dApp(MainFormClass=FrmCalendar).start()
|
DavidFnck/Python_Stock_Github
|
news/news/items.py
|
# -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class NewsItem(scrapy.Item):
    """Container for one scraped news article and its source metadata."""

    news_thread = scrapy.Field()
    news_title = scrapy.Field()
    news_url = scrapy.Field()
    news_time = scrapy.Field()
    news_from = scrapy.Field()
    from_url = scrapy.Field()
    news_body = scrapy.Field()
|
bradfeehan/SublimePHPCoverage
|
php_coverage/command.py
|
import sublime_plugin
from php_coverage.data import CoverageDataFactory
from php_coverage.finder import CoverageFinder
from php_coverage.matcher import Matcher
class CoverageCommand(sublime_plugin.TextCommand):
    """
    Base class for a text command which has a coverage file.
    """

    def __init__(self, view, coverage_finder=None, matcher=None):
        super(CoverageCommand, self).__init__(view)
        self.coverage_finder = coverage_finder
        self.matcher = matcher

    def get_coverage_finder(self):
        """
        Return the coverage finder, lazily creating a default
        CoverageFinder instance when none was injected.
        """
        finder = self.coverage_finder
        if not finder:
            finder = self.coverage_finder = CoverageFinder()
        return finder

    def coverage(self):
        """
        Load the coverage data for the file open in this command's
        view, or None when no coverage file can be located.
        """
        coverage_file = self.get_coverage_finder().find(self.view.file_name())
        return CoverageDataFactory().factory(coverage_file) if coverage_file else None

    def get_matcher(self):
        """
        Return the matcher, lazily creating a default Matcher
        instance when none was injected.
        """
        matcher = self.matcher
        if not matcher:
            matcher = self.matcher = Matcher()
        return matcher

    def should_include(self, filename):
        """Delegate to the matcher to decide whether to include filename."""
        return self.get_matcher().should_include(filename)
|
msscully/datamart
|
tests/test_roles.py
|
from tests import TestCase
from werkzeug.urls import url_quote
from datamart.models import Role
from flask.ext.security import current_user
class TestRoles(TestCase):
    """Access-control and CRUD tests for the /roles/ pages."""
    # NOTE(review): the `'...' in response.data` comparisons assume str
    # response bodies (Python 2 / old Flask); under Python 3 response.data
    # is bytes -- confirm the project's interpreter before porting.
    def test_show_roles_anon(self):
        """Verify unauthenticated users can't see the roles page."""
        response = self.client.get('/roles/', follow_redirects=False)
        # Anonymous users get bounced to the login page with ?next=/roles/.
        new_location='/login?next=%s' % url_quote('/roles/', safe='')
        self.assertRedirects(response, location=new_location)
        response = self.client.get('/roles/', follow_redirects=True)
        assert 'Please log in to access this page.' in response.data
        self.assertTemplateUsed(name='login.html')
    def test_show_roles_non_admin(self):
        """Make sure logged in non-admin users can't see the roles page."""
        self.login('demo@example.com','123456')
        assert current_user.is_authenticated
        response = self._test_get_request('/roles/', 'index.html', follow_redirects=True)
        assert 'Permission denied' in response.data
        self.logout()
    def test_show_roles_admin(self):
        """Make sure logged in admins can see the roles page."""
        self.login('admin@example.com','123456')
        response = self._test_get_request('/roles/', 'roles.html')
        assert 'Please log in to access this page.' not in response.data
        self.logout()
    def test_role_add(self):
        """Adds a role using a post to /roles/add/"""
        self.login('admin@example.com', '123456')
        self._test_get_request('/roles/add/', 'role_edit.html')
        data = {
            'name': 'Ruler',
            'description': "of all I survey.",
        }
        response = self.client.post('/roles/add/', data=data)
        assert 'Please fix errors and resubmit.' not in response.data
        # Exactly one role with the new name must now exist.
        new_role = Role.query.filter_by(name=data['name'])
        assert new_role.count() == 1
        self.logout()
    def test_role_edit(self):
        """Edit a role using webforms."""
        self.login('admin@example.com', '123456')
        data = {
            'name': 'Ruler',
            'description': "of all I survey.",
        }
        # Create the role first if a previous test did not leave it behind.
        new_role = Role.query.filter_by(name=data['name'])
        if new_role.count() != 1:
            response = self.client.post('/roles/add/', data=data)
            assert 'Please fix errors and resubmit.' not in response.data
            new_role = Role.query.filter_by(name=data['name'])
            assert new_role.count() == 1
        # Rename it and confirm the change is visible in the listing.
        data['name'] = 'Peon'
        response = self.client.post('/roles/%s/edit/' % new_role.first().id, data=data)
        assert 'Please fix errors and resubmit.' not in response.data
        new_role = Role.query.filter_by(name=data['name'])
        assert new_role.count() == 1
        response = self.client.get('/roles/')
        assert data['name'] in response.data
        self.logout()
|
nada-labs/sitemap-generator
|
spider.py
|
#
# Walks every page that it can find in a website
#
# It's an excuse to play with BeautifulSoup
import re
from io import BytesIO
from bs4 import BeautifulSoup
from pycurl import Curl
from queue import Queue, Empty as QueueEmpty
from urllib.parse import urlsplit, urlunsplit, urljoin
from sys import stdout
class PageFetcher():
    """Fetches a single page over HTTP via pycurl, capturing the status
    line, response headers and (optionally) the decoded body."""
    def __init__(self):
        self.curl = Curl()
        self.url = None
        self.headers = {}
        self.status = ''
        self.code = 0
        # Raw string: '\S' is an invalid escape in a plain string literal.
        self.charset_re = re.compile(r'charset=(\S+)')
    def handle_headers(self, header):
        """Parse one raw header line from an HTTP response.

        "Name: value" lines populate self.headers; the "HTTP/x.y code
        status" status line sets self.code and self.status.
        """
        header = header.decode('iso-8859-1')  # headers always in iso-8859-1
        if ':' in header:
            # split out the header's name and value
            n, v = header.split(': ', 1)
            self.headers[n] = v.rstrip('\r\n')
        elif 'HTTP' in header:
            h, code, status = header.split(' ', 2)
            self.code = int(code)
            self.status = status.rstrip('\r\n')
    def encoding(self):
        """Gets the encoding from the headers, otherwise assumes iso-8859-1"""
        if 'Content-Type' in self.headers:
            match = self.charset_re.search(self.headers['Content-Type'].lower())
            if match:
                return match.group(1)
        return 'iso-8859-1'
    def fetch(self, url, headers_only=False):
        """Fetch url; return (status_code, headers_dict, decoded_body).

        When headers_only is true a HEAD-style request is made and the
        returned body is empty.
        """
        # reset the data gathered from any previous fetch
        self.headers = {}
        self.code = 0
        self.status = None
        # get the page
        buff = BytesIO()
        self.curl.setopt(self.curl.URL, url)
        # NOBODY=1 asks curl to skip the response body entirely.
        self.curl.setopt(self.curl.NOBODY, 1 if headers_only else 0)
        self.curl.setopt(self.curl.WRITEDATA, buff)
        self.curl.setopt(self.curl.HEADERFUNCTION, self.handle_headers)
        self.curl.perform()
        # decode the returned bytes using the charset the server declared
        body = buff.getvalue().decode(self.encoding())
        return self.code, self.headers, body
class Spider:
    """Fetches every page within a website"""
    def __init__(self):
        # Raw string: '\+' is an invalid escape in a plain string literal
        # (DeprecationWarning on modern Pythons). Matches (x)html/xml types.
        self.htmlpage = re.compile(r'text/(html|xml)|application/(xhtml\+xml|xml)')
    def walk(self, url, pagefilter=None):
        """Walk all pages in the given website, breadth-first.

        Each discovered page is handed to process_page(); pagefilter, when
        given, is called with (code, headers) and may veto processing.
        """
        if not url.startswith('http'):
            url = 'http://' + url
        fetch = PageFetcher()
        queued = Queue()
        queued.put(url)
        processed = set()
        while not queued.empty():
            url = queued.get()
            if url not in processed:
                have_body = False
                processed.add(url)
                code, headers, body = fetch.fetch(url, True)  # only get headers
                # Follow same-site redirects by queueing the target.
                if code >= 300 and code <= 399 and 'Location' in headers:
                    locpart = urlsplit(headers['Location'])
                    urlpart = urlsplit(url)
                    if locpart.scheme == urlpart.scheme and locpart.netloc == urlpart.netloc:
                        queued.put(headers['Location'])
                # parse out the links if it's a html page
                if self.ishtml(headers):
                    code, headers, body = fetch.fetch(url)  # get the whole page
                    have_body = True
                    links = self.sitelinks(body, url)
                    for l in links:
                        queued.put(l)
                # pass the page onto the processor, if it passes the defined filter
                if not pagefilter or (pagefilter and pagefilter(code, headers)):
                    if not have_body:
                        code, headers, body = fetch.fetch(url)  # get the whole page
                    self.process_page(url, code, headers, body)
    def ishtml(self, headers):
        '''Determines if the retrieved page is a (x)html page'''
        if 'Content-Type' in headers and self.htmlpage.search(headers['Content-Type']):
            return True
        return False
    def sitelinks(self, html_page, url):
        """Finds all same-site links in the provided html page"""
        bs = BeautifulSoup(html_page)
        links = set()
        urlpart = urlsplit(url)
        try:
            for anchor in bs.find_all('a'):
                linkpart = list(urlsplit(anchor['href']))
                linkpart[4] = ''  # remove the fragment
                # Fill in scheme/netloc for scheme- or host-relative links.
                if linkpart[0] == '':
                    linkpart[0] = urlpart.scheme
                if linkpart[1] == '':
                    linkpart[1] = urlpart.netloc
                if linkpart[0] == urlpart.scheme and linkpart[1] == urlpart.netloc:
                    if linkpart[2].startswith('/'):
                        links.add(urlunsplit(linkpart))
                    elif linkpart[2] != '':
                        # relative URL.
                        links.add(urljoin(url, linkpart[2]))
        except KeyError:
            # An <a> tag without an href attribute -- ignore it.
            pass
        return links
    def process_page(self, url, code, headers, body):
        '''Does things with the retrieved page; subclasses override.'''
        pass
|
jwinzer/OpenSlides
|
server/tests/integration/motions/test_polls.py
|
from decimal import Decimal
import pytest
from django.conf import settings
from django.contrib.auth import get_user_model
from django.urls import reverse
from rest_framework import status
from rest_framework.test import APIClient
from openslides.core.config import config
from openslides.motions.models import Motion, MotionOption, MotionPoll, MotionVote
from openslides.poll.models import BasePoll
from openslides.utils.auth import get_group_model
from openslides.utils.autoupdate import inform_changed_data
from tests.common_groups import GROUP_ADMIN_PK, GROUP_DEFAULT_PK, GROUP_DELEGATE_PK
from tests.count_queries import count_queries
from tests.test_case import TestCase
@pytest.mark.django_db(transaction=False)
def test_motion_poll_db_queries():
    """
    Tests that only the following db queries are done:
    * 1 request to get the polls,
    * 1 request to get all options for all polls,
    * 1 request to get all votes for all options,
    * 1 request to get all users for all votes,
    * 1 request to get all poll groups,
    = 5 queries
    """
    create_motion_polls()
    # count_queries wraps the getter and returns the number of SQL queries run.
    assert count_queries(MotionPoll.get_elements)() == 5
@pytest.mark.django_db(transaction=False)
def test_motion_vote_db_queries():
    """
    Tests that only 1 query is done when fetching MotionVotes
    """
    create_motion_polls()
    # All votes must come back from a single SELECT.
    assert count_queries(MotionVote.get_elements)() == 1
@pytest.mark.django_db(transaction=False)
def test_motion_option_db_queries():
    """
    Tests that only the following db queries are done:
    * 1 request to get the options,
    * 1 request to get all votes for all options,
    = 2 queries
    """
    create_motion_polls()
    # count_queries wraps the getter and returns the number of SQL queries run.
    assert count_queries(MotionOption.get_elements)() == 2
def create_motion_polls():
    """
    Creates 1 Motion with 5 polls with 5 options each which have 2 votes each
    """
    motion = Motion.objects.create(title="test_motion_wfLrsjEHXBmPplbvQ65N")
    groups = [get_group_model().objects.get(pk=pk) for pk in (1, 2)]
    for index in range(5):
        poll = MotionPoll.objects.create(
            motion=motion, title=f"test_title_{index}", pollmethod="YN", type="named"
        )
        for group in groups:
            poll.groups.add(group)
        for j in range(5):
            option = MotionOption.objects.create(poll=poll)
            for k in range(2):
                user = get_user_model().objects.create_user(
                    username=f"test_username_{index}{j}{k}",
                    password="test_password_kbzj5L8ZtVxBllZzoW6D",
                )
                # First voter votes yes, second votes no.
                MotionVote.objects.create(
                    user=user,
                    option=option,
                    value=("Y" if k == 0 else "N"),
                    weight=Decimal(1),
                )
                poll.voted.add(user)
class CreateMotionPoll(TestCase):
    """
    Tests creating polls of motions.
    """

    def advancedSetUp(self):
        """Create the motion every test attaches its poll to."""
        self.motion = Motion(
            title="test_title_Aiqueigh2dae9phabiqu",
            text="test_text_Neekoh3zou6li5rue8iL",
        )
        self.motion.save()

    def test_simple(self):
        """A fully-specified create request succeeds and builds options."""
        response = self.client.post(
            reverse("motionpoll-list"),
            {
                "title": "test_title_ailai4toogh3eefaa2Vo",
                "pollmethod": "YNA",
                "type": "named",
                "motion_id": self.motion.id,
                "onehundred_percent_base": "YN",
                "majority_method": "simple",
            },
        )
        self.assertHttpStatusVerbose(response, status.HTTP_201_CREATED)
        self.assertTrue(MotionPoll.objects.exists())
        poll = MotionPoll.objects.get()
        self.assertEqual(poll.title, "test_title_ailai4toogh3eefaa2Vo")
        self.assertEqual(poll.pollmethod, "YNA")
        self.assertEqual(poll.type, "named")
        self.assertEqual(poll.motion.id, self.motion.id)
        self.assertTrue(poll.options.exists())

    def test_default_method(self):
        """Omitting pollmethod falls back to YNA."""
        response = self.client.post(
            reverse("motionpoll-list"),
            {
                "title": "test_title_ailai4toogh3eefaa2Vo",
                "type": "named",
                "motion_id": self.motion.id,
                "onehundred_percent_base": "YN",
                "majority_method": "simple",
            },
        )
        self.assertHttpStatusVerbose(response, status.HTTP_201_CREATED)
        self.assertTrue(MotionPoll.objects.exists())
        poll = MotionPoll.objects.get()
        self.assertEqual(poll.pollmethod, "YNA")

    def test_autoupdate(self):
        """Creating a poll pushes a complete serialization via autoupdate."""
        response = self.client.post(
            reverse("motionpoll-list"),
            {
                "title": "test_title_9Ce8OsdB8YWTVm5YOzqH",
                "pollmethod": "YNA",
                "type": "named",
                "motion_id": self.motion.id,
                "onehundred_percent_base": "YN",
                "majority_method": "simple",
            },
        )
        self.assertHttpStatusVerbose(response, status.HTTP_201_CREATED)
        autoupdate = self.get_last_autoupdate(user=self.admin)
        self.assertEqual(
            autoupdate[0]["motions/motion-poll:1"],
            {
                "motion_id": 1,
                "pollmethod": MotionPoll.POLLMETHOD_YNA,
                "state": MotionPoll.STATE_CREATED,
                "type": MotionPoll.TYPE_NAMED,
                "title": "test_title_9Ce8OsdB8YWTVm5YOzqH",
                "onehundred_percent_base": MotionPoll.PERCENT_BASE_YN,
                "majority_method": MotionPoll.MAJORITY_SIMPLE,
                "groups_id": [],
                "votesvalid": "0.000000",
                "votesinvalid": "0.000000",
                "votescast": "0.000000",
                "options_id": [1],
                "id": 1,
                "voted_id": [],
                "user_has_voted": False,
                "user_has_voted_for_delegations": [],
            },
        )
        # No deleted elements accompany the create.
        self.assertEqual(autoupdate[1], [])

    def test_missing_keys(self):
        """Dropping any single required key must yield a 400 and no poll."""
        complete_request_data = {
            "title": "test_title_OoCh9aitaeyaeth8nom1",
            "type": "named",
            "motion_id": self.motion.id,
            "onehundred_percent_base": "YN",
            "majority_method": "simple",
        }
        for key in complete_request_data.keys():
            request_data = {
                _key: value
                for _key, value in complete_request_data.items()
                if _key != key
            }
            response = self.client.post(reverse("motionpoll-list"), request_data)
            self.assertHttpStatusVerbose(response, status.HTTP_400_BAD_REQUEST)
            self.assertFalse(MotionPoll.objects.exists())

    def test_with_groups(self):
        """groups_id entries end up attached to the created poll."""
        group1 = get_group_model().objects.get(pk=1)
        group2 = get_group_model().objects.get(pk=2)
        response = self.client.post(
            reverse("motionpoll-list"),
            {
                "title": "test_title_Thoo2eiphohhi1eeXoow",
                "pollmethod": "YNA",
                "type": "named",
                "motion_id": self.motion.id,
                "onehundred_percent_base": "YN",
                "majority_method": "simple",
                "groups_id": [1, 2],
            },
        )
        self.assertHttpStatusVerbose(response, status.HTTP_201_CREATED)
        poll = MotionPoll.objects.get()
        self.assertTrue(group1 in poll.groups.all())
        self.assertTrue(group2 in poll.groups.all())

    def test_with_empty_groups(self):
        """An explicitly empty groups_id list is accepted."""
        response = self.client.post(
            reverse("motionpoll-list"),
            {
                "title": "test_title_Thoo2eiphohhi1eeXoow",
                "pollmethod": MotionPoll.POLLMETHOD_YNA,
                "type": MotionPoll.TYPE_NAMED,
                "motion_id": self.motion.id,
                "onehundred_percent_base": MotionPoll.PERCENT_BASE_YN,
                "majority_method": MotionPoll.MAJORITY_SIMPLE,
                "groups_id": [],
            },
        )
        self.assertHttpStatusVerbose(response, status.HTTP_201_CREATED)
        poll = MotionPoll.objects.get()
        self.assertFalse(poll.groups.exists())

    def test_not_supported_type(self):
        """An unknown poll type is rejected."""
        response = self.client.post(
            reverse("motionpoll-list"),
            {
                "title": "test_title_yaiyeighoh0Iraet3Ahc",
                "pollmethod": MotionPoll.POLLMETHOD_YNA,
                "type": "not_existing",
                "motion_id": self.motion.id,
                "onehundred_percent_base": MotionPoll.PERCENT_BASE_YN,
                "majority_method": MotionPoll.MAJORITY_SIMPLE,
            },
        )
        self.assertHttpStatusVerbose(response, status.HTTP_400_BAD_REQUEST)
        self.assertFalse(MotionPoll.objects.exists())

    def test_not_allowed_type(self):
        """Named polls are rejected while electronic voting is disabled."""
        # BUGFIX: restore the setting in a finally block -- previously a
        # failing assertion left ENABLE_ELECTRONIC_VOTING = False and
        # poisoned every later test in the run.
        setattr(settings, "ENABLE_ELECTRONIC_VOTING", False)
        try:
            response = self.client.post(
                reverse("motionpoll-list"),
                {
                    "title": "test_title_3jdWIXbKBa7ZXutf3RYf",
                    "pollmethod": MotionPoll.POLLMETHOD_YN,
                    "type": MotionPoll.TYPE_NAMED,
                    "motion_id": self.motion.id,
                    "onehundred_percent_base": MotionPoll.PERCENT_BASE_YN,
                    "majority_method": MotionPoll.MAJORITY_SIMPLE,
                },
            )
            self.assertHttpStatusVerbose(response, status.HTTP_400_BAD_REQUEST)
            self.assertFalse(MotionPoll.objects.exists())
        finally:
            setattr(settings, "ENABLE_ELECTRONIC_VOTING", True)

    def test_not_supported_pollmethod(self):
        """An unknown pollmethod is rejected."""
        response = self.client.post(
            reverse("motionpoll-list"),
            {
                "title": "test_title_SeVaiteYeiNgie5Xoov8",
                "pollmethod": "not_existing",
                "type": "named",
                "motion_id": self.motion.id,
                "onehundred_percent_base": MotionPoll.PERCENT_BASE_YN,
                "majority_method": MotionPoll.MAJORITY_SIMPLE,
            },
        )
        self.assertHttpStatusVerbose(response, status.HTTP_400_BAD_REQUEST)
        self.assertFalse(MotionPoll.objects.exists())

    def test_create_with_votes(self):
        """Analog polls can be created together with their vote values."""
        response = self.client.post(
            reverse("motionpoll-list"),
            {
                "title": "test_title_0X5LifVkKiSh8OPGQM8e",
                "pollmethod": MotionPoll.POLLMETHOD_YN,
                "type": MotionPoll.TYPE_ANALOG,
                "motion_id": self.motion.id,
                "onehundred_percent_base": MotionPoll.PERCENT_BASE_YNA,
                "majority_method": MotionPoll.MAJORITY_SIMPLE,
                "votes": {
                    "Y": 1,
                    "N": 2,
                    "votesvalid": "-2",
                    "votesinvalid": "-2",
                    "votescast": "-2",
                },
            },
        )
        self.assertHttpStatusVerbose(response, status.HTTP_201_CREATED)
        poll = MotionPoll.objects.get()
        self.assertEqual(poll.state, MotionPoll.STATE_FINISHED)
        self.assertTrue(MotionVote.objects.exists())

    def test_create_with_votes_publish_immediately(self):
        """publish_immediately moves the new analog poll straight to published."""
        response = self.client.post(
            reverse("motionpoll-list"),
            {
                "title": "test_title_iXhJX0jmNl3Nvadsi8JO",
                "pollmethod": MotionPoll.POLLMETHOD_YN,
                "type": MotionPoll.TYPE_ANALOG,
                "motion_id": self.motion.id,
                "onehundred_percent_base": MotionPoll.PERCENT_BASE_YNA,
                "majority_method": MotionPoll.MAJORITY_SIMPLE,
                "votes": {
                    "Y": 1,
                    "N": 2,
                    "votesvalid": "-2",
                    "votesinvalid": "-2",
                    "votescast": "-2",
                },
                "publish_immediately": "1",
            },
        )
        self.assertHttpStatusVerbose(response, status.HTTP_201_CREATED)
        poll = MotionPoll.objects.get()
        self.assertEqual(poll.state, MotionPoll.STATE_PUBLISHED)
        self.assertTrue(MotionVote.objects.exists())

    def test_create_with_invalid_votes(self):
        """An incomplete votes dict (missing votescast) is rejected."""
        response = self.client.post(
            reverse("motionpoll-list"),
            {
                "title": "test_title_phSl1IALPIoDyM9uI2Kq",
                "pollmethod": MotionPoll.POLLMETHOD_YN,
                "type": MotionPoll.TYPE_ANALOG,
                "motion_id": self.motion.id,
                "onehundred_percent_base": MotionPoll.PERCENT_BASE_YNA,
                "majority_method": MotionPoll.MAJORITY_SIMPLE,
                "votes": {"Y": 1, "N": 2, "votesvalid": "-2", "votesinvalid": "-2"},
                "publish_immediately": "1",
            },
        )
        self.assertHttpStatusVerbose(response, status.HTTP_400_BAD_REQUEST)
        self.assertFalse(MotionPoll.objects.exists())
        self.assertFalse(MotionVote.objects.exists())

    def test_create_with_votes_wrong_type(self):
        """Inline votes are only valid for analog polls, not named ones."""
        response = self.client.post(
            reverse("motionpoll-list"),
            {
                "title": "test_title_PgvqRIvuKuVImEpQJAMZ",
                "pollmethod": MotionPoll.POLLMETHOD_YN,
                "type": MotionPoll.TYPE_NAMED,
                "motion_id": self.motion.id,
                "onehundred_percent_base": MotionPoll.PERCENT_BASE_YNA,
                "majority_method": MotionPoll.MAJORITY_SIMPLE,
                "votes": {"Y": 1, "N": 2, "votesvalid": "-2", "votesinvalid": "-2"},
                "publish_immediately": "1",
            },
        )
        self.assertHttpStatusVerbose(response, status.HTTP_400_BAD_REQUEST)
        self.assertFalse(MotionPoll.objects.exists())
        self.assertFalse(MotionVote.objects.exists())
class UpdateMotionPoll(TestCase):
"""
Tests updating polls of motions.
"""
def setUp(self):
    """Log in as admin and create a motion with one named YNA poll to patch."""
    self.client = APIClient()
    self.client.login(username="admin", password="admin")
    self.motion = Motion(
        title="test_title_Aiqueigh2dae9phabiqu",
        text="test_text_Neekoh3zou6li5rue8iL",
    )
    self.motion.save()
    self.group = get_group_model().objects.get(pk=1)
    self.poll = MotionPoll.objects.create(
        motion=self.motion,
        title="test_title_beeFaihuNae1vej2ai8m",
        pollmethod="YNA",
        type="named",
        onehundred_percent_base="YN",
        majority_method="simple",
    )
    self.poll.create_options()
    self.poll.groups.add(self.group)
def test_patch_title(self):
    """The poll title can be patched."""
    response = self.client.patch(
        reverse("motionpoll-detail", args=[self.poll.pk]),
        {"title": "test_title_Aishohh1ohd0aiSut7gi"},
    )
    self.assertHttpStatusVerbose(response, status.HTTP_200_OK)
    poll = MotionPoll.objects.get()
    self.assertEqual(poll.title, "test_title_Aishohh1ohd0aiSut7gi")
def test_prevent_patching_motion(self):
    """motion_id is read-only: patching it is silently ignored."""
    motion = Motion(
        title="test_title_phohdah8quukooHeetuz",
        text="test_text_ue2yeisaech1ahBohhoo",
    )
    motion.save()
    response = self.client.patch(
        reverse("motionpoll-detail", args=[self.poll.pk]), {"motion_id": motion.id}
    )
    self.assertHttpStatusVerbose(response, status.HTTP_200_OK)
    poll = MotionPoll.objects.get()
    self.assertEqual(poll.motion.id, self.motion.id)  # unchanged
def test_patch_pollmethod(self):
    """Changing pollmethod to YN also narrows the percent base to YN."""
    response = self.client.patch(
        reverse("motionpoll-detail", args=[self.poll.pk]), {"pollmethod": "YN"}
    )
    self.assertHttpStatusVerbose(response, status.HTTP_200_OK)
    poll = MotionPoll.objects.get()
    self.assertEqual(poll.pollmethod, "YN")
    self.assertEqual(poll.onehundred_percent_base, "YN")
def test_patch_invalid_pollmethod(self):
    """An unknown pollmethod is rejected and the old value kept."""
    response = self.client.patch(
        reverse("motionpoll-detail", args=[self.poll.pk]), {"pollmethod": "invalid"}
    )
    self.assertHttpStatusVerbose(response, status.HTTP_400_BAD_REQUEST)
    poll = MotionPoll.objects.get()
    self.assertEqual(poll.pollmethod, "YNA")
def test_patch_type(self):
    """The poll type can be patched while the poll is still in created state."""
    response = self.client.patch(
        reverse("motionpoll-detail", args=[self.poll.pk]), {"type": "analog"}
    )
    self.assertHttpStatusVerbose(response, status.HTTP_200_OK)
    poll = MotionPoll.objects.get()
    self.assertEqual(poll.type, "analog")
def test_patch_invalid_type(self):
    """An unknown type is rejected and the old value kept."""
    response = self.client.patch(
        reverse("motionpoll-detail", args=[self.poll.pk]), {"type": "invalid"}
    )
    self.assertHttpStatusVerbose(response, status.HTTP_400_BAD_REQUEST)
    poll = MotionPoll.objects.get()
    self.assertEqual(poll.type, "named")
def test_patch_not_allowed_type(self):
    """Patching to an electronic type is rejected while electronic voting is off."""
    # BUGFIX: restore the setting in a finally block -- previously a failing
    # assertion left ENABLE_ELECTRONIC_VOTING = False for all later tests.
    setattr(settings, "ENABLE_ELECTRONIC_VOTING", False)
    try:
        response = self.client.patch(
            reverse("motionpoll-detail", args=[self.poll.pk]),
            {"type": BasePoll.TYPE_NAMED},
        )
        self.assertHttpStatusVerbose(response, status.HTTP_400_BAD_REQUEST)
        poll = MotionPoll.objects.get()
        self.assertEqual(poll.type, BasePoll.TYPE_NAMED)
    finally:
        setattr(settings, "ENABLE_ELECTRONIC_VOTING", True)
def test_patch_100_percent_base(self):
    """The 100%-base can be patched to "cast"."""
    response = self.client.patch(
        reverse("motionpoll-detail", args=[self.poll.pk]),
        {"onehundred_percent_base": "cast"},
    )
    self.assertHttpStatusVerbose(response, status.HTTP_200_OK)
    poll = MotionPoll.objects.get()
    self.assertEqual(poll.onehundred_percent_base, "cast")
def test_patch_wrong_100_percent_base(self):
    """An unknown 100%-base is rejected and the old value kept."""
    response = self.client.patch(
        reverse("motionpoll-detail", args=[self.poll.pk]),
        {"onehundred_percent_base": "invalid"},
    )
    self.assertHttpStatusVerbose(response, status.HTTP_400_BAD_REQUEST)
    poll = MotionPoll.objects.get()
    self.assertEqual(poll.onehundred_percent_base, "YN")
def test_patch_majority_method(self):
    """The majority method can be patched."""
    response = self.client.patch(
        reverse("motionpoll-detail", args=[self.poll.pk]),
        {"majority_method": "two_thirds"},
    )
    self.assertHttpStatusVerbose(response, status.HTTP_200_OK)
    poll = MotionPoll.objects.get()
    self.assertEqual(poll.majority_method, "two_thirds")
def test_patch_wrong_majority_method(self):
    """An unknown majority method is rejected and the old value kept."""
    response = self.client.patch(
        reverse("motionpoll-detail", args=[self.poll.pk]),
        {"majority_method": "invalid majority method"},
    )
    self.assertHttpStatusVerbose(response, status.HTTP_400_BAD_REQUEST)
    poll = MotionPoll.objects.get()
    self.assertEqual(poll.majority_method, "simple")
def test_patch_groups_to_empty(self):
response = self.client.patch(
reverse("motionpoll-detail", args=[self.poll.pk]), {"groups_id": []}
)
self.assertHttpStatusVerbose(response, status.HTTP_200_OK)
poll = MotionPoll.objects.get()
self.assertFalse(poll.groups.exists())
def test_patch_groups(self):
group2 = get_group_model().objects.get(pk=2)
response = self.client.patch(
reverse("motionpoll-detail", args=[self.poll.pk]),
{"groups_id": [group2.id]},
)
self.assertHttpStatusVerbose(response, status.HTTP_200_OK)
poll = MotionPoll.objects.get()
self.assertEqual(poll.groups.count(), 1)
self.assertEqual(poll.groups.get(), group2)
def test_patch_title_started(self):
self.poll.state = 2
self.poll.save()
response = self.client.patch(
reverse("motionpoll-detail", args=[self.poll.pk]),
{"title": "test_title_1FjLGeQqsi9GgNzPp73S"},
)
self.assertHttpStatusVerbose(response, status.HTTP_200_OK)
poll = MotionPoll.objects.get()
self.assertEqual(poll.title, "test_title_1FjLGeQqsi9GgNzPp73S")
def test_patch_wrong_state(self):
self.poll.state = 2
self.poll.save()
response = self.client.patch(
reverse("motionpoll-detail", args=[self.poll.pk]),
{"type": BasePoll.TYPE_NAMED},
)
self.assertHttpStatusVerbose(response, status.HTTP_400_BAD_REQUEST)
poll = MotionPoll.objects.get()
self.assertEqual(poll.type, BasePoll.TYPE_NAMED)
def test_patch_majority_method_state_not_created(self):
self.poll.state = 2
self.poll.save()
response = self.client.patch(
reverse("motionpoll-detail", args=[self.poll.pk]),
{"majority_method": "two_thirds"},
)
self.assertHttpStatusVerbose(response, status.HTTP_200_OK)
poll = MotionPoll.objects.get()
self.assertEqual(poll.majority_method, "two_thirds")
def test_patch_100_percent_base_state_not_created(self):
self.poll.state = 2
self.poll.save()
response = self.client.patch(
reverse("motionpoll-detail", args=[self.poll.pk]),
{"onehundred_percent_base": "cast"},
)
self.assertHttpStatusVerbose(response, status.HTTP_200_OK)
poll = MotionPoll.objects.get()
self.assertEqual(poll.onehundred_percent_base, "cast")
def test_patch_wrong_100_percent_base_state_not_created(self):
self.poll.state = 2
self.poll.pollmethod = MotionPoll.POLLMETHOD_YN
self.poll.save()
response = self.client.patch(
reverse("motionpoll-detail", args=[self.poll.pk]),
{"onehundred_percent_base": MotionPoll.PERCENT_BASE_YNA},
)
self.assertHttpStatusVerbose(response, status.HTTP_200_OK)
poll = MotionPoll.objects.get()
self.assertEqual(poll.onehundred_percent_base, "YN")
class VoteMotionPollAnalog(TestCase):
    """Voting on an analog motion poll: the manager enters aggregate results
    (Y/N/A sums plus votesvalid/-invalid/-cast) instead of individual votes."""
    def setUp(self):
        # Authenticated admin client; used for all requests in this class.
        self.client = APIClient()
        self.client.login(username="admin", password="admin")
        self.motion = Motion(
            title="test_title_OoK9IeChe2Jeib9Deeji",
            text="test_text_eichui1oobiSeit9aifo",
        )
        self.motion.save()
        self.poll = MotionPoll.objects.create(
            motion=self.motion,
            title="test_title_tho8PhiePh8upaex6phi",
            pollmethod="YNA",
            type=BasePoll.TYPE_ANALOG,
        )
        self.poll.create_options()
    def start_poll(self):
        """Put the poll into the started state directly via the model."""
        self.poll.state = MotionPoll.STATE_STARTED
        self.poll.save()
    def make_admin_delegate(self):
        """Demote the admin to the delegate group (drops admin-group permissions)."""
        admin = get_user_model().objects.get(username="admin")
        admin.groups.add(GROUP_DELEGATE_PK)
        admin.groups.remove(GROUP_ADMIN_PK)
        inform_changed_data(admin)
    def test_start_poll(self):
        """Starting an analog poll leaves the aggregates as None and no votes."""
        response = self.client.post(reverse("motionpoll-start", args=[self.poll.pk]))
        self.assertHttpStatusVerbose(response, status.HTTP_200_OK)
        poll = MotionPoll.objects.get()
        self.assertEqual(poll.state, MotionPoll.STATE_STARTED)
        self.assertEqual(poll.votesvalid, None)
        self.assertEqual(poll.votesinvalid, None)
        self.assertEqual(poll.votescast, None)
        self.assertFalse(poll.get_votes().exists())
    def test_stop_poll(self):
        """The stop action is rejected (400) for an analog poll."""
        self.start_poll()
        response = self.client.post(reverse("motionpoll-stop", args=[self.poll.pk]))
        self.assertHttpStatusVerbose(response, status.HTTP_400_BAD_REQUEST)
        # The in-memory poll object was not changed by the failed request.
        self.assertEqual(self.poll.state, MotionPoll.STATE_STARTED)
    def test_vote(self):
        """Entering analog results stores the Decimals and finishes the poll."""
        self.start_poll()
        response = self.client.post(
            reverse("motionpoll-vote", args=[self.poll.pk]),
            {
                "data": {
                    "Y": "1",
                    "N": "2.35",
                    "A": "-1",
                    "votesvalid": "4.64",
                    "votesinvalid": "-2",
                    "votescast": "-2",
                },
            },
        )
        self.assertHttpStatusVerbose(response, status.HTTP_200_OK)
        poll = MotionPoll.objects.get()
        self.assertEqual(poll.votesvalid, Decimal("4.64"))
        self.assertEqual(poll.votesinvalid, Decimal("-2"))
        self.assertEqual(poll.votescast, Decimal("-2"))
        self.assertEqual(poll.get_votes().count(), 3)
        self.assertEqual(poll.state, MotionPoll.STATE_FINISHED)
        option = poll.options.get()
        self.assertEqual(option.yes, Decimal("1"))
        self.assertEqual(option.no, Decimal("2.35"))
        self.assertEqual(option.abstain, Decimal("-1"))
        self.assertAutoupdate(poll)
    def test_vote_no_permissions(self):
        """Without manage permissions, entering analog votes is forbidden (403)."""
        self.start_poll()
        self.make_admin_delegate()
        response = self.client.post(
            reverse("motionpoll-vote", args=[self.poll.pk]), {"data": {}}
        )
        self.assertHttpStatusVerbose(response, status.HTTP_403_FORBIDDEN)
        self.assertFalse(MotionPoll.objects.get().get_votes().exists())
    def test_vote_no_data(self):
        """A vote request without a data payload is rejected (400)."""
        self.start_poll()
        response = self.client.post(reverse("motionpoll-vote", args=[self.poll.pk]), {})
        self.assertHttpStatusVerbose(response, status.HTTP_400_BAD_REQUEST)
        self.assertFalse(MotionPoll.objects.get().get_votes().exists())
    def test_vote_missing_data(self):
        """For the YNA method all of Y, N and A must be present (A missing here)."""
        self.start_poll()
        response = self.client.post(
            reverse("motionpoll-vote", args=[self.poll.pk]),
            {"data": {"Y": "4", "N": "22.6"}},
        )
        self.assertHttpStatusVerbose(response, status.HTTP_400_BAD_REQUEST)
        self.assertFalse(MotionPoll.objects.get().get_votes().exists())
    def test_vote_wrong_data_format(self):
        """The data payload must be a mapping, not a list."""
        self.start_poll()
        response = self.client.post(
            reverse("motionpoll-vote", args=[self.poll.pk]), {"data": [1, 2, 5]}
        )
        self.assertHttpStatusVerbose(response, status.HTTP_400_BAD_REQUEST)
        self.assertFalse(MotionPoll.objects.get().get_votes().exists())
    def test_vote_wrong_vote_data(self):
        """Non-numeric vote values are rejected (400) and nothing is stored."""
        self.start_poll()
        response = self.client.post(
            reverse("motionpoll-vote", args=[self.poll.pk]),
            {"data": {"Y": "some string", "N": "-2", "A": "3"}},
        )
        self.assertHttpStatusVerbose(response, status.HTTP_400_BAD_REQUEST)
        self.assertFalse(MotionPoll.objects.get().get_votes().exists())
    def test_vote_state_finished(self):
        """Analog results can be overwritten again while the poll is finished
        (state 3); the second submission replaces the first one."""
        self.start_poll()
        self.client.post(
            reverse("motionpoll-vote", args=[self.poll.pk]),
            {
                "data": {
                    "Y": "3",
                    "N": "1",
                    "A": "5",
                    "votesvalid": "-2",
                    "votesinvalid": "1",
                    "votescast": "-1",
                },
            },
        )
        self.poll.state = 3
        self.poll.save()
        response = self.client.post(
            reverse("motionpoll-vote", args=[self.poll.pk]),
            {
                "data": {
                    "Y": "1",
                    "N": "2.35",
                    "A": "-1",
                    "votesvalid": "4.64",
                    "votesinvalid": "-2",
                    "votescast": "3",
                },
            },
        )
        self.assertHttpStatusVerbose(response, status.HTTP_200_OK)
        poll = MotionPoll.objects.get()
        self.assertEqual(poll.votesvalid, Decimal("4.64"))
        self.assertEqual(poll.votesinvalid, Decimal("-2"))
        self.assertEqual(poll.votescast, Decimal("3"))
        self.assertEqual(poll.get_votes().count(), 3)
        option = poll.options.get()
        self.assertEqual(option.yes, Decimal("1"))
        self.assertEqual(option.no, Decimal("2.35"))
        self.assertEqual(option.abstain, Decimal("-1"))
class VoteMotionPollNamed(TestCase):
    """Voting on a named motion poll: present users of the entitled group cast
    individual votes, optionally on behalf of a delegating user."""
    def setUp(self):
        self.client = APIClient()
        self.client.login(username="admin", password="admin")
        self.motion = Motion(
            title="test_title_OoK9IeChe2Jeib9Deeji",
            text="test_text_eichui1oobiSeit9aifo",
        )
        self.motion.save()
        self.group = get_group_model().objects.get(pk=GROUP_DELEGATE_PK)
        self.admin = get_user_model().objects.get(username="admin")
        self.poll = MotionPoll.objects.create(
            motion=self.motion,
            title="test_title_tho8PhiePh8upaex6phi",
            pollmethod="YNA",
            type=BasePoll.TYPE_NAMED,
        )
        self.poll.create_options()
        # Only the delegate group is entitled to vote in this poll.
        self.poll.groups.add(self.group)
    def start_poll(self):
        """Put the poll into the started state directly via the model."""
        self.poll.state = MotionPoll.STATE_STARTED
        self.poll.save()
    def make_admin_delegate(self):
        """Move the admin from the admin group into the delegate group."""
        self.admin.groups.add(GROUP_DELEGATE_PK)
        self.admin.groups.remove(GROUP_ADMIN_PK)
        inform_changed_data(self.admin)
    def make_admin_present(self):
        """Mark the admin as present; absent users may not vote (see below)."""
        self.admin.is_present = True
        self.admin.save()
    def test_start_poll(self):
        """Starting a named poll initializes the aggregates to zero."""
        response = self.client.post(reverse("motionpoll-start", args=[self.poll.pk]))
        self.assertHttpStatusVerbose(response, status.HTTP_200_OK)
        poll = MotionPoll.objects.get()
        self.assertEqual(poll.state, MotionPoll.STATE_STARTED)
        self.assertEqual(poll.votesvalid, Decimal("0"))
        self.assertEqual(poll.votesinvalid, Decimal("0"))
        self.assertEqual(poll.votescast, Decimal("0"))
        self.assertFalse(poll.get_votes().exists())
    def test_vote(self):
        """A present delegate can vote; the vote is recorded with their user."""
        self.start_poll()
        self.make_admin_delegate()
        self.make_admin_present()
        response = self.client.post(
            reverse("motionpoll-vote", args=[self.poll.pk]), {"data": "N"}
        )
        self.assertHttpStatusVerbose(response, status.HTTP_200_OK)
        poll = MotionPoll.objects.get()
        self.assertEqual(poll.votesvalid, Decimal("1"))
        self.assertEqual(poll.votesinvalid, Decimal("0"))
        self.assertEqual(poll.votescast, Decimal("1"))
        self.assertEqual(poll.get_votes().count(), 1)
        option = poll.options.get()
        self.assertEqual(option.yes, Decimal("0"))
        self.assertEqual(option.no, Decimal("1"))
        self.assertEqual(option.abstain, Decimal("0"))
        vote = option.votes.get()
        self.assertEqual(vote.user, self.admin)
        self.assertEqual(vote.weight, Decimal("1"))
    def test_vote_with_voteweight(self):
        """With vote weight activated, the user's weight is applied to the vote."""
        config["users_activate_vote_weight"] = True
        self.start_poll()
        self.make_admin_delegate()
        self.make_admin_present()
        self.admin.vote_weight = weight = Decimal("3.5")
        self.admin.save()
        response = self.client.post(
            reverse("motionpoll-vote", args=[self.poll.pk]), {"data": "A"}
        )
        self.assertHttpStatusVerbose(response, status.HTTP_200_OK)
        poll = MotionPoll.objects.get()
        self.assertEqual(poll.votesvalid, weight)
        self.assertEqual(poll.votesinvalid, Decimal("0"))
        # votescast counts voters, not weight.
        self.assertEqual(poll.votescast, Decimal("1"))
        self.assertEqual(poll.get_votes().count(), 1)
        self.assertEqual(poll.amount_users_voted_with_individual_weight(), weight)
        option = poll.options.get()
        self.assertEqual(option.yes, Decimal("0"))
        self.assertEqual(option.no, Decimal("0"))
        self.assertEqual(option.abstain, weight)
        vote = option.votes.get()
        self.assertEqual(vote.weight, weight)
    def test_vote_without_voteweight(self):
        """With vote weight deactivated, a custom weight is ignored (weight 1)."""
        self.admin.vote_weight = Decimal("3.5")
        self.admin.save()
        self.test_vote()
    def test_change_vote(self):
        """Voting a second time is rejected; the first vote remains."""
        self.start_poll()
        self.make_admin_delegate()
        self.make_admin_present()
        response = self.client.post(
            reverse("motionpoll-vote", args=[self.poll.pk]), {"data": "N"}
        )
        self.assertHttpStatusVerbose(response, status.HTTP_200_OK)
        response = self.client.post(
            reverse("motionpoll-vote", args=[self.poll.pk]), {"data": "A"}
        )
        self.assertHttpStatusVerbose(response, status.HTTP_400_BAD_REQUEST)
        poll = MotionPoll.objects.get()
        self.assertEqual(poll.votesvalid, Decimal("1"))
        self.assertEqual(poll.votesinvalid, Decimal("0"))
        self.assertEqual(poll.votescast, Decimal("1"))
        self.assertEqual(poll.get_votes().count(), 1)
        option = poll.options.get()
        self.assertEqual(option.yes, Decimal("0"))
        self.assertEqual(option.no, Decimal("1"))
        self.assertEqual(option.abstain, Decimal("0"))
        vote = option.votes.get()
        self.assertEqual(vote.user, self.admin)
    def test_vote_anonymous(self):
        """Anonymous guests may not vote in a named poll, even if anonymous
        access is enabled and the default group is entitled."""
        self.poll.groups.add(GROUP_DEFAULT_PK)
        self.start_poll()
        config["general_system_enable_anonymous"] = True
        guest_client = APIClient()
        response = guest_client.post(
            reverse("motionpoll-vote", args=[self.poll.pk]), {"data": "Y"}
        )
        self.assertHttpStatusVerbose(response, status.HTTP_403_FORBIDDEN)
        self.assertFalse(MotionPoll.objects.get().get_votes().exists())
    def test_vote_wrong_state(self):
        """Voting on a poll that has not been started is rejected (400)."""
        self.make_admin_present()
        self.make_admin_delegate()
        response = self.client.post(
            reverse("motionpoll-vote", args=[self.poll.pk]), {"data": {}}
        )
        self.assertHttpStatusVerbose(response, status.HTTP_400_BAD_REQUEST)
        self.assertFalse(MotionPoll.objects.get().get_votes().exists())
    def test_vote_wrong_group(self):
        """A user not in the entitled group may not vote (admin stays admin here)."""
        self.start_poll()
        self.make_admin_present()
        response = self.client.post(
            reverse("motionpoll-vote", args=[self.poll.pk]), {"data": {}}
        )
        self.assertHttpStatusVerbose(response, status.HTTP_403_FORBIDDEN)
        self.assertFalse(MotionPoll.objects.get().get_votes().exists())
    def test_vote_not_present(self):
        """A delegate who is not present may not vote (403)."""
        self.start_poll()
        self.make_admin_delegate()
        response = self.client.post(
            reverse("motionpoll-vote", args=[self.poll.pk]), {"data": {}}
        )
        self.assertHttpStatusVerbose(response, status.HTTP_403_FORBIDDEN)
        self.assertFalse(MotionPoll.objects.get().get_votes().exists())
    def test_vote_missing_data(self):
        """An empty data payload is rejected (400)."""
        self.start_poll()
        self.make_admin_delegate()
        self.make_admin_present()
        response = self.client.post(
            reverse("motionpoll-vote", args=[self.poll.pk]), {"data": {}}
        )
        self.assertHttpStatusVerbose(response, status.HTTP_400_BAD_REQUEST)
        self.assertFalse(MotionPoll.objects.get().get_votes().exists())
    def test_vote_wrong_data_format(self):
        """A list payload is not a valid named vote (400)."""
        self.start_poll()
        self.make_admin_delegate()
        self.make_admin_present()
        response = self.client.post(
            reverse("motionpoll-vote", args=[self.poll.pk]), {"data": [1, 2, 5]}
        )
        self.assertHttpStatusVerbose(response, status.HTTP_400_BAD_REQUEST)
        self.assertFalse(MotionPoll.objects.get().get_votes().exists())
    def setup_vote_delegation(self, with_delegation=True):
        """Create a second delegate user who (optionally) delegates their vote
        to the admin: user -> admin."""
        self.start_poll()
        self.make_admin_delegate()
        self.make_admin_present()
        self.user, self.user_password = self.create_user()
        self.user.groups.add(GROUP_DELEGATE_PK)
        if with_delegation:
            self.user.vote_delegated_to = self.admin
        self.user.save()
        inform_changed_data(self.admin)  # put the admin into the cache to update
        # its vote_delegated_to_id field
    def test_vote_delegation(self):
        """The admin can vote for the delegating (absent) user; the vote is
        attributed to the user with the admin as delegated_user."""
        self.setup_vote_delegation()
        response = self.client.post(
            reverse("motionpoll-vote", args=[self.poll.pk]),
            {"data": "N", "user_id": self.user.pk},  # user not present
        )
        self.assertHttpStatusVerbose(response, status.HTTP_200_OK)
        poll = MotionPoll.objects.get()
        self.assertEqual(poll.votesvalid, Decimal("1"))
        self.assertEqual(poll.votesinvalid, Decimal("0"))
        self.assertEqual(poll.votescast, Decimal("1"))
        self.assertEqual(poll.get_votes().count(), 1)
        option = poll.options.get()
        self.assertEqual(option.yes, Decimal("0"))
        self.assertEqual(option.no, Decimal("1"))
        self.assertEqual(option.abstain, Decimal("0"))
        vote = option.votes.get()
        self.assertEqual(vote.user, self.user)
        self.assertEqual(vote.delegated_user, self.admin)
        # The admin's autoupdate must list the user they voted for.
        autoupdate = self.get_last_autoupdate(user=self.admin)
        self.assertIn("motions/motion-poll:1", autoupdate[0])
        self.assertEqual(
            autoupdate[0]["motions/motion-poll:1"]["user_has_voted_for_delegations"],
            [self.user.pk],
        )
    def test_vote_delegation_and_self_vote(self):
        """After voting for the delegator, the admin can still cast their own vote."""
        self.test_vote_delegation()
        response = self.client.post(
            reverse("motionpoll-vote", args=[self.poll.pk]), {"data": "Y"}
        )
        self.assertHttpStatusVerbose(response, status.HTTP_200_OK)
        poll = MotionPoll.objects.get()
        self.assertEqual(poll.votesvalid, Decimal("2"))
        self.assertEqual(poll.votesinvalid, Decimal("0"))
        self.assertEqual(poll.votescast, Decimal("2"))
        self.assertEqual(poll.get_votes().count(), 2)
        option = poll.options.get()
        self.assertEqual(option.yes, Decimal("1"))
        self.assertEqual(option.no, Decimal("1"))
        self.assertEqual(option.abstain, Decimal("0"))
        vote = option.votes.get(user_id=self.admin.pk)
        self.assertEqual(vote.user, self.admin)
        self.assertEqual(vote.delegated_user, self.admin)
    def test_vote_delegation_forbidden(self):
        """Voting for another user without a delegation is rejected (400)."""
        self.setup_vote_delegation(False)
        response = self.client.post(
            reverse("motionpoll-vote", args=[self.poll.pk]),
            {"data": "N", "user_id": self.user.pk},
        )
        self.assertHttpStatusVerbose(response, status.HTTP_400_BAD_REQUEST)
        self.assertFalse(MotionPoll.objects.get().get_votes().exists())
    def test_vote_delegation_not_present(self):
        """An absent proxy may not cast the delegated vote (403)."""
        self.setup_vote_delegation()
        self.admin.is_present = False
        self.admin.save()
        response = self.client.post(
            reverse("motionpoll-vote", args=[self.poll.pk]),
            {"data": "N", "user_id": self.user.pk},
        )
        self.assertHttpStatusVerbose(response, status.HTTP_403_FORBIDDEN)
        self.assertFalse(MotionPoll.objects.get().get_votes().exists())
    def test_vote_delegation_delegate_not_in_group(self):
        """The proxy may cast the delegated vote even when the proxy itself is
        not in the entitled group (only the delegator's group matters)."""
        self.setup_vote_delegation()
        self.admin.groups.remove(GROUP_DELEGATE_PK)
        self.admin.save()
        response = self.client.post(
            reverse("motionpoll-vote", args=[self.poll.pk]),
            {"data": "N", "user_id": self.user.pk},
        )
        self.assertHttpStatusVerbose(response, status.HTTP_200_OK)
        poll = MotionPoll.objects.get()
        self.assertEqual(poll.get_votes().count(), 1)
        vote = poll.get_votes()[0]
        self.assertEqual(vote.value, "N")
        self.assertEqual(vote.user, self.user)
        self.assertEqual(vote.delegated_user, self.admin)
    def test_vote_delegation_delegator_not_in_group(self):
        """If the delegator is not in the entitled group, the proxy may not
        vote for them (403)."""
        self.setup_vote_delegation()
        self.user.groups.remove(GROUP_DELEGATE_PK)
        self.user.save()
        response = self.client.post(
            reverse("motionpoll-vote", args=[self.poll.pk]),
            {"data": "N", "user_id": self.user.pk},
        )
        self.assertHttpStatusVerbose(response, status.HTTP_403_FORBIDDEN)
        self.assertFalse(MotionPoll.objects.get().get_votes().exists())
    def test_vote_delegation_delegator_self_vote_not_allowed(self):
        """A user who delegated their vote may not vote for themselves (400)."""
        self.setup_vote_delegation()
        # Make the user a delegate and present
        self.admin.groups.add(GROUP_DELEGATE_PK)
        self.admin.groups.remove(GROUP_ADMIN_PK)
        self.user.is_present = True
        self.user.save()
        # Use the user to make the request to vote for himself
        user_client = APIClient()
        user_client.login(username=self.user.username, password=self.user_password)
        response = user_client.post(
            reverse("motionpoll-vote", args=[self.poll.pk]),
            {"data": "N"},
        )
        self.assertHttpStatusVerbose(response, status.HTTP_400_BAD_REQUEST)
        self.assertFalse(MotionPoll.objects.get().get_votes().exists())
class VoteMotionPollNamedAutoupdates(TestCase):
    """Autoupdate contents after a vote on a named poll. 3 important users:
    self.admin: manager, has can_see, can_manage, can_manage_polls (in admin group)
    self.user1: votes, has can_see perms and is in the delegate group
    self.other_user: just has can_see perms and is NOT in the delegate group.
    """
    def advancedSetUp(self):
        """Build motion, users and a started named poll entitled to delegates.
        (self.admin is presumably provided by the TestCase base class.)"""
        self.motion = Motion(
            title="test_title_OoK9IeChe2Jeib9Deeji",
            text="test_text_eichui1oobiSeit9aifo",
        )
        self.motion.save()
        self.delegate_group = get_group_model().objects.get(pk=GROUP_DELEGATE_PK)
        self.other_user, _ = self.create_user()
        inform_changed_data(self.other_user)
        self.user, user_password = self.create_user()
        self.user.groups.add(self.delegate_group)
        self.user.is_present = True
        self.user.save()
        self.user_client = APIClient()
        self.user_client.login(username=self.user.username, password=user_password)
        self.poll = MotionPoll.objects.create(
            motion=self.motion,
            title="test_title_tho8PhiePh8upaex6phi",
            pollmethod="YNA",
            type=BasePoll.TYPE_NAMED,
            state=MotionPoll.STATE_STARTED,
            onehundred_percent_base="YN",
            majority_method="simple",
        )
        self.poll.create_options()
        self.poll.groups.add(self.delegate_group)
    def test_vote(self):
        """The manager sees full poll/vote/option data; the voter sees their own
        vote; other users get only the restricted poll and option fields."""
        response = self.user_client.post(
            reverse("motionpoll-vote", args=[self.poll.pk]), {"data": "A"}
        )
        self.assertHttpStatusVerbose(response, status.HTTP_200_OK)
        poll = MotionPoll.objects.get()
        vote = MotionVote.objects.get()
        # Expect the admin to see the full data in the autoupdate
        autoupdate = self.get_last_autoupdate(user=self.admin)
        self.assertEqual(
            autoupdate[0],
            {
                "motions/motion-poll:1": {
                    "motion_id": 1,
                    "pollmethod": "YNA",
                    "state": 2,
                    "type": "named",
                    "title": "test_title_tho8PhiePh8upaex6phi",
                    "onehundred_percent_base": "YN",
                    "majority_method": "simple",
                    "groups_id": [GROUP_DELEGATE_PK],
                    "votesvalid": "1.000000",
                    "votesinvalid": "0.000000",
                    "votescast": "1.000000",
                    "options_id": [1],
                    "id": 1,
                    "user_has_voted": False,
                    "user_has_voted_for_delegations": [],
                    "voted_id": [self.user.id],
                },
                "motions/motion-vote:1": {
                    "pollstate": 2,
                    "id": 1,
                    "weight": "1.000000",
                    "value": "A",
                    "user_id": self.user.id,
                    "delegated_user_id": self.user.id,
                },
                "motions/motion-option:1": {
                    "abstain": "1.000000",
                    "id": 1,
                    "no": "0.000000",
                    "poll_id": 1,
                    "pollstate": 2,
                    "yes": "0.000000",
                },
            },
        )
        self.assertEqual(autoupdate[1], [])
        # Expect user1 to receive his vote
        autoupdate = self.get_last_autoupdate(user=self.user)
        self.assertEqual(
            autoupdate[0]["motions/motion-vote:1"],
            {
                "pollstate": 2,
                "option_id": 1,
                "id": 1,
                "weight": "1.000000",
                "value": "A",
                "user_id": self.user.id,
                "delegated_user_id": self.user.id,
            },
        )
        self.assertEqual(
            autoupdate[0]["motions/motion-option:1"],
            {"id": 1, "poll_id": 1, "pollstate": 2},
        )
        self.assertEqual(autoupdate[1], [])
        # Expect non-admins to get a restricted poll update
        for user in (self.user, self.other_user):
            self.assertAutoupdate(poll, user=user)
            autoupdate = self.get_last_autoupdate(user=user)
            self.assertEqual(
                autoupdate[0]["motions/motion-poll:1"],
                {
                    "motion_id": 1,
                    "pollmethod": "YNA",
                    "state": 2,
                    "type": "named",
                    "title": "test_title_tho8PhiePh8upaex6phi",
                    "onehundred_percent_base": "YN",
                    "majority_method": "simple",
                    "groups_id": [GROUP_DELEGATE_PK],
                    "options_id": [1],
                    "id": 1,
                    "user_has_voted": user == self.user,
                    "user_has_voted_for_delegations": [],
                },
            )
            self.assertEqual(
                autoupdate[0]["motions/motion-option:1"],
                {
                    "id": 1,
                    "poll_id": 1,
                    "pollstate": 2,
                },  # noqa black and flake are no friends :(
            )
        # Other users should not get a vote autoupdate
        self.assertNoAutoupdate(vote, user=self.other_user)
        self.assertNoDeletedAutoupdate(vote, user=self.other_user)
class VoteMotionPollPseudoanonymousAutoupdates(TestCase):
    """Autoupdate contents after a vote on a pseudoanonymous poll. 3 important users:
    self.admin: manager, has can_see, can_manage, can_manage_polls (in admin group)
    self.user: votes, has can_see perms and is in the delegate group
    self.other_user: just has can_see perms and is NOT in the delegate group.
    """
    def advancedSetUp(self):
        """Build motion, users and a started pseudoanonymous poll entitled to
        delegates. (self.admin is presumably provided by the TestCase base.)"""
        self.motion = Motion(
            title="test_title_OoK9IeChe2Jeib9Deeji",
            text="test_text_eichui1oobiSeit9aifo",
        )
        self.motion.save()
        self.delegate_group = get_group_model().objects.get(pk=GROUP_DELEGATE_PK)
        self.other_user, _ = self.create_user()
        inform_changed_data(self.other_user)
        self.user, user_password = self.create_user()
        self.user.groups.add(self.delegate_group)
        self.user.is_present = True
        self.user.save()
        self.user_client = APIClient()
        self.user_client.login(username=self.user.username, password=user_password)
        self.poll = MotionPoll.objects.create(
            motion=self.motion,
            title="test_title_cahP1umooteehah2jeey",
            pollmethod="YNA",
            type=BasePoll.TYPE_PSEUDOANONYMOUS,
            state=MotionPoll.STATE_STARTED,
            onehundred_percent_base="YN",
            majority_method="simple",
        )
        self.poll.create_options()
        self.poll.groups.add(self.delegate_group)
    def test_vote(self):
        """The manager sees full data but the vote carries no user id (the poll
        is pseudoanonymous); other users only get the restricted poll update."""
        response = self.user_client.post(
            reverse("motionpoll-vote", args=[self.poll.pk]), {"data": "A"}
        )
        self.assertHttpStatusVerbose(response, status.HTTP_200_OK)
        poll = MotionPoll.objects.get()
        vote = MotionVote.objects.get()
        # Expect the admin to see the full data in the autoupdate
        autoupdate = self.get_last_autoupdate(user=self.admin)
        self.assertEqual(
            autoupdate[0],
            {
                "motions/motion-poll:1": {
                    "motion_id": 1,
                    "pollmethod": "YNA",
                    "state": 2,
                    "type": "pseudoanonymous",
                    "title": "test_title_cahP1umooteehah2jeey",
                    "onehundred_percent_base": "YN",
                    "majority_method": "simple",
                    "groups_id": [GROUP_DELEGATE_PK],
                    "votesvalid": "1.000000",
                    "votesinvalid": "0.000000",
                    "votescast": "1.000000",
                    "options_id": [1],
                    "id": 1,
                    "user_has_voted": False,
                    "user_has_voted_for_delegations": [],
                    "voted_id": [self.user.id],
                },
                "motions/motion-vote:1": {
                    "pollstate": 2,
                    "option_id": 1,
                    "id": 1,
                    "weight": "1.000000",
                    "value": "A",
                    "user_id": None,
                    "delegated_user_id": None,
                },
                "motions/motion-option:1": {
                    "abstain": "1.000000",
                    "id": 1,
                    "no": "0.000000",
                    "poll_id": 1,
                    "pollstate": 2,
                    "yes": "0.000000",
                },
            },
        )
        self.assertEqual(autoupdate[1], [])
        # Expect non-admins to get a restricted poll update and no autoupdate
        # for a changed vote nor a deleted one
        for user in (self.user, self.other_user):
            self.assertAutoupdate(poll, user=user)
            autoupdate = self.get_last_autoupdate(user=user)
            self.assertEqual(
                autoupdate[0]["motions/motion-poll:1"],
                {
                    "motion_id": 1,
                    "pollmethod": "YNA",
                    "state": 2,
                    "type": "pseudoanonymous",
                    "title": "test_title_cahP1umooteehah2jeey",
                    "onehundred_percent_base": "YN",
                    "majority_method": "simple",
                    "groups_id": [GROUP_DELEGATE_PK],
                    "options_id": [1],
                    "id": 1,
                    "user_has_voted": user == self.user,
                    "user_has_voted_for_delegations": [],
                },
            )
            self.assertNoAutoupdate(vote, user=user)
            self.assertNoDeletedAutoupdate(vote, user=user)
class VoteMotionPollPseudoanonymous(TestCase):
    """Voting on a pseudoanonymous motion poll: the voter is tracked in
    poll.voted, but the stored vote itself carries no user reference."""
    def setUp(self):
        self.client = APIClient()
        self.client.login(username="admin", password="admin")
        self.motion = Motion(
            title="test_title_Chaebaenges1aebe8iev",
            text="test_text_cah2aigh6ahc8OhNguQu",
        )
        self.motion.save()
        self.group = get_group_model().objects.get(pk=GROUP_DELEGATE_PK)
        self.admin = get_user_model().objects.get(username="admin")
        self.poll = MotionPoll.objects.create(
            motion=self.motion,
            title="test_title_yohphei9Iegohqu9ki7m",
            pollmethod="YNA",
            type=BasePoll.TYPE_PSEUDOANONYMOUS,
        )
        self.poll.create_options()
        # Only the delegate group is entitled to vote in this poll.
        self.poll.groups.add(self.group)
    def start_poll(self):
        """Put the poll into the started state directly via the model."""
        self.poll.state = MotionPoll.STATE_STARTED
        self.poll.save()
    def make_admin_delegate(self):
        """Move the admin from the admin group into the delegate group."""
        self.admin.groups.add(GROUP_DELEGATE_PK)
        self.admin.groups.remove(GROUP_ADMIN_PK)
        inform_changed_data(self.admin)
    def make_admin_present(self):
        """Mark the admin as present so they are allowed to vote."""
        self.admin.is_present = True
        self.admin.save()
    def test_start_poll(self):
        """Starting the poll initializes the aggregates to zero."""
        response = self.client.post(reverse("motionpoll-start", args=[self.poll.pk]))
        self.assertHttpStatusVerbose(response, status.HTTP_200_OK)
        poll = MotionPoll.objects.get()
        self.assertEqual(poll.state, MotionPoll.STATE_STARTED)
        self.assertEqual(poll.votesvalid, Decimal("0"))
        self.assertEqual(poll.votesinvalid, Decimal("0"))
        self.assertEqual(poll.votescast, Decimal("0"))
        self.assertFalse(poll.get_votes().exists())
    def test_vote(self):
        """The voter appears in poll.voted, but the vote has user None."""
        self.start_poll()
        self.make_admin_delegate()
        self.make_admin_present()
        response = self.client.post(
            reverse("motionpoll-vote", args=[self.poll.pk]), {"data": "N"}
        )
        self.assertHttpStatusVerbose(response, status.HTTP_200_OK)
        poll = MotionPoll.objects.get()
        self.assertEqual(poll.votesvalid, Decimal("1"))
        self.assertEqual(poll.votesinvalid, Decimal("0"))
        self.assertEqual(poll.votescast, Decimal("1"))
        self.assertEqual(poll.get_votes().count(), 1)
        self.assertEqual(poll.amount_users_voted_with_individual_weight(), 1)
        option = poll.options.get()
        self.assertEqual(option.yes, Decimal("0"))
        self.assertEqual(option.no, Decimal("1"))
        self.assertEqual(option.abstain, Decimal("0"))
        self.assertTrue(self.admin in poll.voted.all())
        vote = option.votes.get()
        self.assertEqual(vote.user, None)
    def test_change_vote(self):
        """Voting a second time is rejected; the anonymous vote remains."""
        self.start_poll()
        self.make_admin_delegate()
        self.make_admin_present()
        response = self.client.post(
            reverse("motionpoll-vote", args=[self.poll.pk]), {"data": "N"}
        )
        self.assertHttpStatusVerbose(response, status.HTTP_200_OK)
        response = self.client.post(
            reverse("motionpoll-vote", args=[self.poll.pk]), {"data": "A"}
        )
        self.assertHttpStatusVerbose(response, status.HTTP_400_BAD_REQUEST)
        option = MotionPoll.objects.get().options.get()
        self.assertEqual(option.yes, Decimal("0"))
        self.assertEqual(option.no, Decimal("1"))
        self.assertEqual(option.abstain, Decimal("0"))
        vote = option.votes.get()
        self.assertEqual(vote.user, None)
    def test_vote_anonymous(self):
        """Anonymous guests may not vote even with anonymous access enabled."""
        self.poll.groups.add(GROUP_DEFAULT_PK)
        self.start_poll()
        config["general_system_enable_anonymous"] = True
        guest_client = APIClient()
        response = guest_client.post(
            reverse("motionpoll-vote", args=[self.poll.pk]), {"data": "Y"}
        )
        self.assertHttpStatusVerbose(response, status.HTTP_403_FORBIDDEN)
        self.assertFalse(MotionPoll.objects.get().get_votes().exists())
    def test_vote_wrong_state(self):
        """Voting on a poll that has not been started is rejected (400)."""
        self.make_admin_present()
        self.make_admin_delegate()
        response = self.client.post(
            reverse("motionpoll-vote", args=[self.poll.pk]), {"data": {}}
        )
        self.assertHttpStatusVerbose(response, status.HTTP_400_BAD_REQUEST)
        self.assertFalse(MotionPoll.objects.get().get_votes().exists())
    def test_vote_wrong_group(self):
        """A user not in the entitled group may not vote (403)."""
        self.start_poll()
        self.make_admin_present()
        response = self.client.post(
            reverse("motionpoll-vote", args=[self.poll.pk]), {"data": {}}
        )
        self.assertHttpStatusVerbose(response, status.HTTP_403_FORBIDDEN)
        self.assertFalse(MotionPoll.objects.get().get_votes().exists())
    def test_vote_not_present(self):
        """A delegate who is not present may not vote (403)."""
        self.start_poll()
        self.make_admin_delegate()
        response = self.client.post(
            reverse("motionpoll-vote", args=[self.poll.pk]), {"data": {}}
        )
        self.assertHttpStatusVerbose(response, status.HTTP_403_FORBIDDEN)
        self.assertFalse(MotionPoll.objects.get().get_votes().exists())
    def test_vote_missing_data(self):
        """An empty data payload is rejected (400)."""
        self.start_poll()
        self.make_admin_delegate()
        self.make_admin_present()
        response = self.client.post(
            reverse("motionpoll-vote", args=[self.poll.pk]), {"data": {}}
        )
        self.assertHttpStatusVerbose(response, status.HTTP_400_BAD_REQUEST)
        self.assertFalse(MotionPoll.objects.get().get_votes().exists())
    def test_vote_wrong_data_format(self):
        """A list payload is not a valid vote (400)."""
        self.start_poll()
        self.make_admin_delegate()
        self.make_admin_present()
        response = self.client.post(
            reverse("motionpoll-vote", args=[self.poll.pk]), {"data": [1, 2, 5]}
        )
        self.assertHttpStatusVerbose(response, status.HTTP_400_BAD_REQUEST)
        self.assertFalse(MotionPoll.objects.get().get_votes().exists())
class StopMotionPoll(TestCase):
    """Tests for the stop action of a named motion poll."""
    def setUp(self):
        self.client = APIClient()
        self.client.login(username="admin", password="admin")
        motion = Motion(
            title="test_title_eiri4iipeemaeGhahkae",
            text="test_text_eegh7quoochaiNgiyeix",
        )
        motion.save()
        self.motion = motion
        poll = MotionPoll.objects.create(
            motion=motion,
            title="test_title_Hu9Miebopaighee3EDie",
            pollmethod="YNA",
            type=BasePoll.TYPE_NAMED,
        )
        poll.create_options()
        self.poll = poll
    def test_stop_poll(self):
        """A started poll can be stopped and ends up in the finished state."""
        self.poll.state = MotionPoll.STATE_STARTED
        self.poll.save()
        resp = self.client.post(reverse("motionpoll-stop", args=[self.poll.pk]))
        self.assertHttpStatusVerbose(resp, status.HTTP_200_OK)
        self.assertEqual(MotionPoll.objects.get().state, MotionPoll.STATE_FINISHED)
    def test_stop_wrong_state(self):
        """Stopping a poll that was never started must fail with 400."""
        resp = self.client.post(reverse("motionpoll-stop", args=[self.poll.pk]))
        self.assertHttpStatusVerbose(resp, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(MotionPoll.objects.get().state, MotionPoll.STATE_CREATED)
class PublishMotionPoll(TestCase):
    """Tests for the publish action: once published, every user receives the
    full (unrestricted) poll, vote and option data via autoupdate."""
    def advancedSetUp(self):
        """Build a pseudoanonymous poll with one anonymous vote.
        (self.client and self.admin are presumably provided by the base class.)"""
        self.motion = Motion(
            title="test_title_lai8Ho5gai9aijahRasu",
            text="test_text_KieGhosh8ahWiguHeu2D",
        )
        self.motion.save()
        self.poll = MotionPoll.objects.create(
            motion=self.motion,
            title="test_title_Nufae0iew7Iorox2thoo",
            pollmethod="YNA",
            type=BasePoll.TYPE_PSEUDOANONYMOUS,
            onehundred_percent_base="YN",
            majority_method="simple",
        )
        self.poll.create_options()
        option = self.poll.options.get()
        self.user, _ = self.create_user()
        # Anonymous vote (user=None) with weight 2, as stored by a
        # pseudoanonymous poll.
        self.vote = MotionVote.objects.create(
            option=option, user=None, weight=Decimal(2), value="N"
        )
    def test_publish_poll(self):
        """Publishing a finished poll succeeds and broadcasts full data to all."""
        self.poll.state = MotionPoll.STATE_FINISHED
        self.poll.save()
        response = self.client.post(reverse("motionpoll-publish", args=[self.poll.pk]))
        self.assertHttpStatusVerbose(response, status.HTTP_200_OK)
        self.assertEqual(MotionPoll.objects.get().state, MotionPoll.STATE_PUBLISHED)
        # Test autoupdates: Every user should get the full data
        for user in (self.admin, self.user):
            autoupdate = self.get_last_autoupdate(user=user)
            self.assertEqual(
                autoupdate[0],
                {
                    "motions/motion-poll:1": {
                        "motion_id": 1,
                        "pollmethod": "YNA",
                        "state": 4,
                        "type": "pseudoanonymous",
                        "title": "test_title_Nufae0iew7Iorox2thoo",
                        "onehundred_percent_base": "YN",
                        "majority_method": "simple",
                        "groups_id": [],
                        "votesvalid": "0.000000",
                        "votesinvalid": "0.000000",
                        "votescast": "0.000000",
                        "options_id": [1],
                        "id": 1,
                        "user_has_voted": False,
                        "user_has_voted_for_delegations": [],
                        "voted_id": [],
                    },
                    "motions/motion-vote:1": {
                        "pollstate": 4,
                        "option_id": 1,
                        "id": 1,
                        "weight": "2.000000",
                        "value": "N",
                        "user_id": None,
                        "delegated_user_id": None,
                    },
                    "motions/motion-option:1": {
                        "abstain": "0.000000",
                        "id": 1,
                        "no": "2.000000",
                        "poll_id": 1,
                        "pollstate": 4,
                        "yes": "0.000000",
                    },
                },
            )
            self.assertEqual(autoupdate[1], [])
    def test_publish_wrong_state(self):
        """A poll still in the created state cannot be published (400)."""
        response = self.client.post(reverse("motionpoll-publish", args=[self.poll.pk]))
        self.assertHttpStatusVerbose(response, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(MotionPoll.objects.get().state, MotionPoll.STATE_CREATED)
class PseudoanonymizeMotionPoll(TestCase):
    """Tests for pseudoanonymizing a named motion poll: the vote values stay,
    but the user references of the single votes are removed."""

    def setUp(self):
        self.client = APIClient()
        self.client.login(username="admin", password="admin")
        self.motion = Motion(
            title="test_title_lai8Ho5gai9aijahRasu",
            text="test_text_KieGhosh8ahWiguHeu2D",
        )
        self.motion.save()
        # A finished named poll, ready to be pseudoanonymized.
        self.poll = MotionPoll.objects.create(
            motion=self.motion,
            title="test_title_Nufae0iew7Iorox2thoo",
            pollmethod="YNA",
            type=BasePoll.TYPE_NAMED,
            state=MotionPoll.STATE_FINISHED,
        )
        self.poll.create_options()
        self.option = self.poll.options.get()
        # Two users with one recorded vote each (one Y, one N), both listed
        # in the poll's voted relation.
        self.user1, _ = self.create_user()
        self.vote1 = MotionVote.objects.create(
            user=self.user1, option=self.option, value="Y", weight=Decimal(1)
        )
        self.poll.voted.add(self.user1)
        self.user2, _ = self.create_user()
        self.vote2 = MotionVote.objects.create(
            user=self.user2, option=self.option, value="N", weight=Decimal(1)
        )
        self.poll.voted.add(self.user2)

    def test_pseudoanonymize_poll(self):
        """All aggregates and the voted-list survive; vote->user links do not."""
        response = self.client.post(
            reverse("motionpoll-pseudoanonymize", args=[self.poll.pk])
        )
        self.assertHttpStatusVerbose(response, status.HTTP_200_OK)
        poll = MotionPoll.objects.get()
        self.assertEqual(poll.get_votes().count(), 2)
        self.assertEqual(poll.amount_users_voted_with_individual_weight(), 2)
        self.assertEqual(poll.votesvalid, Decimal("2"))
        self.assertEqual(poll.votesinvalid, Decimal("0"))
        self.assertEqual(poll.votescast, Decimal("2"))
        option = poll.options.get()
        self.assertEqual(option.yes, Decimal("1"))
        self.assertEqual(option.no, Decimal("1"))
        self.assertEqual(option.abstain, Decimal("0"))
        # The voted-list stays intact; only the per-vote user links are cleared.
        self.assertTrue(self.user1 in poll.voted.all())
        self.assertTrue(self.user2 in poll.voted.all())
        for vote in poll.get_votes().all():
            self.assertTrue(vote.user is None)

    def test_pseudoanonymize_wrong_state(self):
        """A poll still in the created state cannot be pseudoanonymized."""
        self.poll.state = MotionPoll.STATE_CREATED
        self.poll.save()
        response = self.client.post(
            reverse("motionpoll-pseudoanonymize", args=[self.poll.pk])
        )
        self.assertHttpStatusVerbose(response, status.HTTP_400_BAD_REQUEST)
        poll = MotionPoll.objects.get()
        # Nothing must have been anonymized.
        self.assertTrue(poll.get_votes().filter(user=self.user1).exists())
        self.assertTrue(poll.get_votes().filter(user=self.user2).exists())

    def test_pseudoanonymize_wrong_type(self):
        """An analog poll cannot be pseudoanonymized."""
        self.poll.type = MotionPoll.TYPE_ANALOG
        self.poll.save()
        response = self.client.post(
            reverse("motionpoll-pseudoanonymize", args=[self.poll.pk])
        )
        self.assertHttpStatusVerbose(response, status.HTTP_400_BAD_REQUEST)
        poll = MotionPoll.objects.get()
        # Nothing must have been anonymized.
        self.assertTrue(poll.get_votes().filter(user=self.user1).exists())
        self.assertTrue(poll.get_votes().filter(user=self.user2).exists())
class ResetMotionPoll(TestCase):
    """Tests for resetting a finished analog motion poll."""

    def advancedSetUp(self):
        # NOTE(review): advancedSetUp is presumably a hook invoked by the
        # project's TestCase after its standard setUp/login - confirm against
        # the base class.
        self.motion = Motion(
            title="test_title_cheiJ1ieph5ohng9queu",
            text="test_text_yahng6fiegaL7mooZ2of",
        )
        self.motion.save()
        self.poll = MotionPoll.objects.create(
            motion=self.motion,
            title="test_title_oozie2Ui9xie0chaghie",
            pollmethod="YNA",
            type=BasePoll.TYPE_ANALOG,
            state=MotionPoll.STATE_FINISHED,
        )
        self.poll.create_options()
        self.option = self.poll.options.get()
        # Two users with one recorded vote each (one Y, one N).
        self.user1, _ = self.create_user()
        self.vote1 = MotionVote.objects.create(
            user=self.user1, option=self.option, value="Y", weight=Decimal(1)
        )
        self.poll.voted.add(self.user1)
        self.user2, _ = self.create_user()
        self.vote2 = MotionVote.objects.create(
            user=self.user2, option=self.option, value="N", weight=Decimal(1)
        )
        self.poll.voted.add(self.user2)

    def test_reset_poll(self):
        """Resetting removes all votes and clears every aggregate value."""
        response = self.client.post(reverse("motionpoll-reset", args=[self.poll.pk]))
        self.assertHttpStatusVerbose(response, status.HTTP_200_OK)
        poll = MotionPoll.objects.get()
        self.assertEqual(poll.get_votes().count(), 0)
        self.assertEqual(poll.amount_users_voted_with_individual_weight(), 0)
        self.assertEqual(poll.votesvalid, None)
        self.assertEqual(poll.votesinvalid, None)
        self.assertEqual(poll.votescast, None)
        option = poll.options.get()
        self.assertEqual(option.yes, Decimal("0"))
        self.assertEqual(option.no, Decimal("0"))
        self.assertEqual(option.abstain, Decimal("0"))
        self.assertFalse(option.votes.exists())

    def test_deleted_autoupdate(self):
        """Resetting emits a changed-autoupdate for the option and
        deleted-autoupdates for both removed votes, for every user."""
        response = self.client.post(reverse("motionpoll-reset", args=[self.poll.pk]))
        self.assertHttpStatusVerbose(response, status.HTTP_200_OK)
        poll = MotionPoll.objects.get()
        option = poll.options.get()
        self.assertAutoupdate(option, self.admin)
        for user in (self.admin, self.user1, self.user2):
            self.assertDeletedAutoupdate(self.vote1, user=user)
            self.assertDeletedAutoupdate(self.vote2, user=user)
class TestMotionPollWithVoteDelegationAutoupdate(TestCase):
    def advancedSetUp(self):
        """ Set up user -> other_user delegation. """
        # NOTE(review): advancedSetUp is presumably a hook the project's
        # TestCase runs after its own setUp - confirm against the base class.
        self.motion = Motion(
            title="test_title_dL91JqhMTiQuQLSDRItZ",
            text="test_text_R7nURdXKVEfEnnJBXJYa",
        )
        self.motion.save()
        self.delegate_group = get_group_model().objects.get(pk=GROUP_DELEGATE_PK)
        # 'user' is a present delegate whose vote is delegated to 'other_user'.
        self.other_user, _ = self.create_user()
        self.user, user_password = self.create_user()
        self.user.groups.add(self.delegate_group)
        self.user.is_present = True
        self.user.vote_delegated_to = self.other_user
        self.user.save()
        self.user_client = APIClient()
        self.user_client.login(username=self.user.username, password=user_password)
        # A named poll restricted to the delegate group.
        self.poll = MotionPoll.objects.create(
            motion=self.motion,
            title="test_title_Q3EuRaALSCCPJuQ2tMqj",
            pollmethod="YNA",
            type=BasePoll.TYPE_NAMED,
            onehundred_percent_base="YN",
            majority_method="simple",
        )
        self.poll.create_options()
        self.poll.groups.add(self.delegate_group)
        self.poll.save()

    def test_start_poll(self):
        """Starting the poll must also autoupdate the delegatee."""
        response = self.client.post(reverse("motionpoll-start", args=[self.poll.pk]))
        self.assertHttpStatusVerbose(response, status.HTTP_200_OK)
        # other_user has to receive an autoupdate because he was delegated
        autoupdate = self.get_last_autoupdate(user=self.other_user)
        assert "motions/motion-poll:1" in autoupdate[0]
|
geojames/Dart_EnvGIS
|
Week6-1_Pandas.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#------------------------------------------------------------------------------
__author__ = 'James T. Dietrich'
__contact__ = 'james.t.dietrich@dartmouth.edu'
__copyright__ = '(c) James Dietrich 2016'
__license__ = 'MIT'
__date__ = 'Wed Nov 16 11:33:39 2016'
__version__ = '1.0'
__status__ = "initial release"
__url__ = "https://github.com/geojames/..."
"""
Name: Week6-1_Pandas.py
Compatibility: Python 3.5
Description: This program does stuff
URL: https://github.com/geojames/...
Requires: libraries
Dev ToDo:
AUTHOR: James T. Dietrich
ORGANIZATION: Dartmouth College
Contact: james.t.dietrich@dartmouth.edu
Copyright: (c) James Dietrich 2016
"""
#------------------------------------------------------------------------------
# Pandas provides a set of labeled array data structures
# Similar to what you're used to in Excel
# + 1-D "Series"
# + 2-D "Data Frames"
# Import Pandas
import pandas as pd
# You also need Numpy
import numpy as np
# and Matplotlib, if you want to plot data
import matplotlib.pyplot as plt
#%% Creating Pandas data
# Most real data enters Pandas via read_csv (covered later), but you will
# also need to build Pandas objects from scratch.
#
# Pandas has two core data structures:
#   Series    = 1-D labeled data (a single column)
#   DataFrame = 2-D labeled data (multiple columns)
#
# Terms:
#   + Column = the actual data
#   + Index column = the first column; defaults to integers, but rows can
#     also be named, or dates can serve as the index
#     * Yes, this is confusing, since data frames still have indices like
#       arrays...
#
# The general structure of a data frame is:
#
#   Index   COL1    COL2    COL3 ...
#   1       data    data    data
#   2       data    data    data
#   ...
#
# or, for real data, something like:
#   Index   temp    pres    wind    wind_dir ...
#   1       26.2    1012.3  0       nan
#   2       27.0    1011.1  5       "w"
#   ...

# 1-D Series data are easy...
s = pd.Series([1,3,5,np.nan,6,8])

# 2-D data can be created in a couple of different ways
# 1: typing the matrix values in directly
df = pd.DataFrame([[1,2,3,4,5], [6,7,8,9,10]],columns=['A','B','C','D','E'])

# 2: from an existing Numpy array
array = np.array([[1,2,3,4,5], [6,7,8,9,10]])
df2 = pd.DataFrame(array)

# 3: a numpy array plus explicit column names
array = np.arange(1.,17.).reshape((4,4))
df3 = pd.DataFrame(array, columns=['A','B','C','D'])

# adding columns: df['new column name'] = data to add...
color_series = pd.Series(['blue','green','red','yellow'])
df3['color'] = color_series

# ...or directly from an array
df3['food'] = np.array(['fish','lettcue','apple','banana'])

#%% Accessing index and column name lists
# These attributes/methods inspect a frame without modifying it.
df.index
df.columns
df.values
df.dtypes
df.describe()
#%% Accessing/adding/deleting column data
# A column can be pulled out by name (string key) or with dot notation.
df3['A']
df3.A

# adding data
df3['color'] = pd.Series(['blue','green','red','yellow'])
df3['random'] = np.random.rand(4,1)

# derive a new column with arithmetic on existing ones
df3['AxRand'] = df3.A * df3.random

# a boolean test yields a True/False column
df3['Blue_TF'] = df3.color == 'blue'

# a scalar assignment is broadcast to every row
df3['ones'] = 'one'

# deleting a column outright
del df3['random']

# pop: extract a column and remove it from the frame in one step
popped = df3.pop('AxRand')

# insert(position, col_name, values) places a column at a given position
df3.insert(0,'AxR',popped)
#%% Date ranges
# Data frames often need date indices; Pandas has handy generators for them.

# start and end dates, daily frequency by default:
#   pd.date_range(start, end, freq='D')
pd.date_range('1/1/2016','12/31/2018')

# or a start date, a number of periods and a frequency:
#   pd.date_range(start, periods=10, freq='D')

# Frequency codes:
#   'A' = year end   - 'AS' = year start
#   'M' = month end  - 'MS' = month start
#   'W' = week
#   'D' = days (the default)
#   'H' = hours
#   'min' = minutes
#   's' = seconds
# A multiplier may be prefixed: '5H' = 5 hours, '30s' = 30 seconds, '7D' = 7 days
pd.date_range('1/1/2016',periods=10, freq='D')
pd.date_range('1/1/2016',periods=10, freq='min')

# a clock time (24-hour format) can be part of the start stamp
pd.date_range('1/1/2016 07:30:00',periods=10, freq='30d')
#%% Data Frame Indexing
dates = pd.date_range('1/1/2016', periods=8)
df = pd.DataFrame(np.random.randn(8, 4), index=dates, columns=['A', 'B', 'C', 'D'])

# Getting Columns
df.A
df['B']

# Rows, all columns
# by classic index
df[0:4]
# by named index range
df['2016-01-02':'2016-01-04']

# using the location (loc) function
# dates index, row 0
df.loc[dates[0]]

# row and column
# FIX: .loc is label-based; an integer slice like df.loc[2:4, ...] raises a
# TypeError on a DatetimeIndex. Slice by label instead (or use .iloc for
# purely positional access).
df.loc[dates[2]:dates[4], ['A', 'B']]

# index and columns
df.loc[dates[2], 'A']   # or with a generic index: df.loc[index[2], 'A']

# Pure index locations (iloc) function
# rows and columns, like arrays
df.iloc[3:5, 0:2]
#%% Data Frame Math
# Math on an entire Data Frame
df2 = df * 3

# In place replacement (not recommended, but good for recalculating and overwriting)
df.B = df.B * 2

# better to create a new column
df['Btimes2'] = df.B * 2

# math between columns
df['convert'] = ((df.A * 9/5) + 32) - df.C

# math for specific indices (rows outside 2:5 become NaN)
df['indexMath'] = df.D[2:5] ** 2

# math with boolean masks
# only apply the equation to positive values in A
df['convertPos'] = ((df.A[df.A >= 0] * 9/5) + 32) - df.C[df.A >= 0]

# Apply a different equation to the negative values
# FIX: the original used chained indexing (df.convertPos[mask] = ...), which
# raises SettingWithCopyWarning and silently fails under copy-on-write
# (pandas >= 2.0). Assign through .loc with the mask instead.
df.loc[df.A < 0, 'convertPos'] = ((df.A[df.A < 0] * 9/5) + 32) + df.D[df.A < 0]
#%% Reduction
dates = pd.date_range('1/1/2016', periods=8)
df = pd.DataFrame(np.random.randn(8, 4), index=dates, columns=['A', 'B', 'C', 'D'])

# Basic statistics. The default axis is 0, i.e. one result per *column*,
# with NaN values skipped; pass axis=1 for per-row statistics instead.
df.mean()
df.std()
df.median()

# Statistics for a single, named column
df.A.mean()

# Sums (column-wise by default as well)
df.sum()
df.B.sum()
#%% Combining / Adding Values
df1 = pd.DataFrame({'A': ['A0', 'A1', 'A2', 'A3'],
                    'B': ['B0', 'B1', 'B2', 'B3'],
                    'C': ['C0', 'C1', 'C2', 'C3'],
                    'D': ['D0', 'D1', 'D2', 'D3']},
                   index=[0, 1, 2, 3])

df2 = pd.DataFrame({'A': ['A4', 'A5', 'A6', 'A7'],
                    'B': ['B4', 'B5', 'B6', 'B7'],
                    'C': ['C4', 'C5', 'C6', 'C7'],
                    'D': ['D4', 'D5', 'D6', 'D7']},
                   index=[4, 5, 6, 7])

series = pd.Series(['X10', 'X11', 'X12', 'X13'], name='X')

# stack vertically (axis=0, default) or side by side (axis=1)
result = pd.concat([df1, df2])
result2 = pd.concat([df1, df2], axis = 1)
# FIX: DataFrame.append was deprecated in pandas 1.4 and removed in 2.0;
# pd.concat([df1, df2]) is the supported equivalent.
result = pd.concat([df1, df2])
result = pd.concat([df1,series],axis=1)

# combining data with matching indicies
dates = pd.date_range('1/1/2016', periods=8)
df = pd.DataFrame(np.random.randn(8, 4), index=dates, columns=['A', 'B', 'C', 'D'])
dates = pd.date_range('1/4/2016', periods=8)
df2 = pd.DataFrame(np.random.randn(8, 2), index=dates, columns=['E', 'F'])
df3 = pd.concat([df, df2], axis=1)
#%% Converting Pandas to Numpy
# FIX: the pd.np alias was deprecated in pandas 0.25 and removed in 2.0;
# call numpy (already imported as np) directly instead.
# single column
np_from_pd = np.array(df3.A)
# whole thing
np_from_pd = np.array(df3)
# using numpy syntax
array = np.array(df3.values)
# Monster statements with multiple instructions
# NOTE(review): this reshape only succeeds if df3.A holds exactly 4 elements;
# with the 11-row df3 built in the cell above it raises - confirm which df3
# was intended here.
np_with_reshape = np.array(df3.A).reshape((2,2))
|
DesertBot/DesertBot
|
desertbot/modules/commands/Splatoon.py
|
"""
Created on Sep 01, 2017
@author: StarlitGhost
"""
import datetime
import time
from twisted.plugin import IPlugin
from twisted.words.protocols.irc import assembleFormattedText, attributes as A
from zope.interface import implementer
from desertbot.message import IRCMessage
from desertbot.moduleinterface import IModule
from desertbot.modules.commandinterface import BotCommand
from desertbot.response import IRCResponse
from desertbot.utils import string
@implementer(IPlugin, IModule)
class Splatoon(BotCommand):
    """Reports the current Splatoon 2 map rotations from splatoon2.ink."""

    def triggers(self):
        return ['splat']

    def help(self, query):
        return "splat [regular/ranked/league/fest]"

    graySplitter = assembleFormattedText(A.normal[' ', A.fg.gray['|'], ' '])

    def _fetch(self, j, short, mode, label):
        """Format the current rotation for a mode; unless short, append the
        next rotation with a countdown until it starts.

        j -- parsed schedule JSON from splatoon2.ink
        short -- if True, only the current rotation is included
        mode -- key into the schedule JSON ('regular', 'gachi', 'league')
        label -- human-readable mode name used in the output
        """
        r = j[mode]
        data = []
        t = A.normal[A.bold['{} {}: '.format(label, r[0]['rule']['name'])],
                     '/'.join([r[0]['stage_a']['name'], r[0]['stage_b']['name']])]
        data.append(assembleFormattedText(t))
        if not short:
            # include next maps, with the time remaining until they rotate in
            now = int(time.time())
            startTime = r[1]['startTime']
            delta = startTime - now
            d = datetime.timedelta(seconds=delta)
            deltaStr = string.deltaTimeToString(d, resolution='m')
            t = A.normal[A.bold['{} {} in {}: '.format(label, r[1]['rule']['name'], deltaStr)],
                         '/'.join([r[1]['stage_a']['name'], r[1]['stage_b']['name']])]
            data.append(assembleFormattedText(t))
        return ' | '.join(data)

    def _regular(self, j, short):
        return self._fetch(j, short, 'regular', 'Regular')

    def _ranked(self, j, short):
        return self._fetch(j, short, 'gachi', 'Ranked')

    def _league(self, j, short):
        return self._fetch(j, short, 'league', 'League')

    def _fest(self, j, short):
        # FIX: previously this fell through and returned None whenever a
        # SplatFest was scheduled, so 'splat fest' replied with a None
        # message. The shape of j['splatfests'] is unknown here -
        # TODO: format the actual fest details once the schema is confirmed.
        if j['splatfests']:
            if not short:
                return 'A SplatFest is currently scheduled!'
        elif not short:
            return 'No SplatFest is currently scheduled'

    def execute(self, message: IRCMessage):
        url = "https://splatoon2.ink/data/schedules.json"
        response = self.bot.moduleHandler.runActionUntilValue('fetch-url', url)
        j = response.json()
        if len(message.parameterList) < 1:
            # no subcommand: show the short form of every mode, dropping
            # modes that have nothing to report (e.g. no SplatFest)
            data = []
            data += filter(None, [self._regular(j, short=True)])
            data += filter(None, [self._ranked(j, short=True)])
            data += filter(None, [self._league(j, short=True)])
            data += filter(None, [self._fest(j, short=True)])
            return IRCResponse(self.graySplitter.join(data), message.replyTo)
        else:
            subCommands = {
                'regular': self._regular,
                'ranked': self._ranked,
                'league': self._league,
                'fest': self._fest
            }
            subCommand = message.parameterList[0].lower()
            if subCommand in subCommands:
                reply = subCommands[subCommand](j, short=False)
                # FIX: guard against a subcommand having nothing to report,
                # instead of passing None into IRCResponse.
                if reply is None:
                    reply = 'Nothing to report'
                return IRCResponse(reply, message.replyTo)
            else:
                return IRCResponse(self.help(None), message.replyTo)


splatoon = Splatoon()
|
zaqwes8811/micro-apps
|
self_driving/deps/Kalman_and_Bayesian_Filters_in_Python_master/experiments/bicycle.py
|
# -*- coding: utf-8 -*-
"""
Created on Tue Apr 28 08:19:21 2015
@author: Roger
"""
from math import *
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.patches import Polygon
# Simulate a simple bicycle (single-track) steering model and plot the path.
wheelbase = 100  # inches
vel = 20 * 12  # 20 ft/s converted to inches per second
steering_angle = radians(1)
t = 1  # time step, seconds
orientation = 0.  # heading, radians
# FIX: the closing parenthesis was missing here (syntax error in the original).
pos = np.array([0., 0.])

for i in range(100):
    # Turn about the instantaneous center of curvature (ICC); the dead
    # 'turn_radius = tan(steering_angle)' assignment was removed.
    radius = wheelbase / tan(steering_angle)
    dist = vel * t
    arc_len = dist / (2 * pi * radius)
    turn_angle = 2 * pi * arc_len  # equivalent to dist / radius
    # ICC lies perpendicular to the current heading at distance 'radius'.
    cx = pos[0] - radius * sin(orientation)
    cy = pos[1] + radius * cos(orientation)
    orientation = (orientation + turn_angle) % (2.0 * pi)
    pos[0] = cx + (sin(orientation) * radius)
    pos[1] = cy - (cos(orientation) * radius)
    plt.scatter(pos[0], pos[1])

plt.axis('equal')
|
hstorm/nn_spatial
|
notebooks/test_spatial.py
|
# -*- coding: utf-8 -*-
import numpy as np
from scipy.spatial import distance_matrix
from scipy import sparse
import pandas as pd
from keras.models import Sequential
from keras.layers import Dense
from keras.layers import Dropout, Activation, Flatten
from keras.optimizers import SGD,RMSprop
#from keras.wrappers.scikit_learn import KerasRegressor
from keras import regularizers
from sklearn.model_selection import cross_val_score
from sklearn.model_selection import KFold
from sklearn.preprocessing import StandardScaler
from sklearn.preprocessing import MinMaxScaler
from sklearn.model_selection import GridSearchCV
from sklearn.model_selection import RandomizedSearchCV
from scipy.stats import randint as sp_randint
from sklearn import preprocessing
from sklearn.pipeline import Pipeline
from keras.utils import to_categorical
from sklearn.model_selection import train_test_split
import matplotlib.pyplot as plt
import matplotlib.mlab as mlab
from keras.layers import Convolution2D, MaxPooling2D
import time
#%%
# =============================================================================
# Generate data
# =============================================================================
# Parameters for the synthetic spatial data set used below.
sizeGrid = 5 # size of the (square) grid
nObs = 6 # number of neighbouring observations
K = 3 # number of features per observation

# set coefficients of the linear data-generating process
b1 = np.random.normal(5, 2, K).reshape(K,1) # coef for own characteristics
b2 = np.random.normal(5, 2, K).reshape(K,1) # coef for neighbor characteristics
errStd = 2 # std dev of the Gaussian error added to Y
cutW = 6 # cut-off distance in the spatial weight matrix W
#%%
def generate_landscape(sizeGrid, nObs, K, b1, b2, errStd, cutW):
    """Generate one synthetic sample: K feature maps on a grid plus target Y.

    The focal ("own") observation sits at the grid centre; nObs neighbours are
    placed uniformly at random on distinct grid cells. Y is a linear
    combination of the own features (b1) and the spatially weighted neighbour
    features (b2), plus Gaussian noise.

    Returns:
        maps -- (sizeGrid, sizeGrid, K) array; layer k holds feature k of
                every observation at its grid location (zeros elsewhere)
        Y -- (1, 1) target value
    """
    # FIX: use integer division - sizeGrid/2 yields float coordinates under
    # Python 3, which are invalid integer indices for the sparse build below
    # (and, when truncated, could silently collide with a neighbour cell).
    locOwn = np.array([[sizeGrid // 2, sizeGrid // 2]])
    # Rejection-sample neighbour locations until all nObs+1 cells are distinct.
    while True:
        loc = np.random.randint(0, sizeGrid, size=(nObs, 2))
        locAll = np.concatenate((locOwn, loc), axis=0)
        # Make sure no two observations share a location
        locAll = np.unique(locAll, axis=0)
        if nObs + 1 == locAll.shape[0]:
            break
    # Own and neighbour characteristics
    X = np.random.randint(0, 100, size=(nObs, K))
    Xown = np.random.randint(0, 100, size=(1, K))
    # Spatial weight matrix W: (1, nObs) indicator of neighbours within cutW
    # of the own location, row-standardised.
    # FIX: dropped np.fill_diagonal(W, 0) - W is (1, nObs), not square, so the
    # call merely zeroed W[0, 0] and unconditionally discarded the first
    # neighbour regardless of its distance.
    W = distance_matrix(locOwn, loc) < cutW
    row_sum = W.sum(axis=1, keepdims=True)
    row_sum[row_sum == 0] = 1  # avoid division by zero when nothing is in range
    W = W / row_sum
    # Linear model: own features (b1) + weighted neighbour features (b2) + noise
    err = np.random.normal(0, errStd, 1)
    Y = np.matmul(Xown, b1) + np.matmul(np.matmul(W, X), b2) + err
    assert Y.shape == (1, 1)
    # Scatter each feature k onto the grid at every observation's location
    # (own observation first, then the neighbours) via a sparse build.
    maps = np.zeros((sizeGrid, sizeGrid, K))
    for k in range(0, K):
        I = np.concatenate((locOwn[:, 0], loc[:, 0]), axis=0)
        J = np.concatenate((locOwn[:, 1], loc[:, 1]), axis=0)
        V = np.concatenate((Xown[:, k], X[:, k]), axis=0)
        A = sparse.coo_matrix((V, (I, J)), shape=(sizeGrid, sizeGrid))
        maps[:, :, k] = A.todense()
    return maps, Y
# %%
# Build a data set of N samples: each row of X is one flattened landscape,
# each row of Y the matching scalar target.
N = 100
X = np.zeros((N, sizeGrid * sizeGrid * K))
Y = np.zeros((N, 1))
for sample in range(N):
    maps, y = generate_landscape(sizeGrid, nObs, K, b1, b2, errStd, cutW)
    Y[sample, :] = y
    # flatten the (grid, grid, K) maps into a single feature row
    X[sample, :] = maps.T.reshape((1, maps.shape[0] * maps.shape[1] * maps.shape[2]))
# %%
# =============================================================================
# Train model
# =============================================================================
# Network input/output sizes, read by dropout2_model() below.
xDim = X.shape[1]  # number of flattened landscape features
yDim = 1  # single regression target
def dropout2_model(lr=0.1,decay=1e-6,momentum=0.9,dropout_rate=0.2):
    """
    Define a model with dropout.

    NOTE(review): only dropout_rate is actually used below - the optimizer is
    a hard-coded RMSprop(lr=0.001), so the lr/decay/momentum arguments are
    silently ignored (they belong to the commented-out SGD line). Confirm
    whether the SGD configuration was meant to be restored.

    Argument:
    lr -- learning rate (currently unused, see note)
    decay -- learning-rate decay (currently unused, see note)
    momentum -- SGD momentum (currently unused, see note)
    dropout_rate -- fraction of units dropped at each Dropout layer
    Returns:
    model -- compiled Keras model (MSE loss, RMSprop optimizer)
    """
    model = Sequential()
    # NOTE(review): two stacked Dropout layers at the input compound to an
    # effective drop probability of 1-(1-p)^2 - verify this is intended.
    model.add(Dropout(dropout_rate, input_shape=(xDim,)))
    model.add(Dropout(dropout_rate))
    model.add(Dense(200, kernel_initializer='normal', activation='relu'))
    model.add(Dropout(dropout_rate))
    model.add(Dense(50, kernel_initializer='normal', activation='relu'))
    model.add(Dropout(dropout_rate))
    model.add(Dense(yDim, kernel_initializer='normal'))
    # Compile model
    #sgd = SGD(lr=lr, decay=decay, momentum=momentum, nesterov=True)
    rmsprop = RMSprop(lr=0.001, rho=0.9, epsilon=1e-08, decay=0.0)
    model.compile(loss='mean_squared_error', optimizer=rmsprop)
    return model
# ## fix random seed for reproducibility
# NOTE(review): the seed is set *after* the data were generated above, so the
# data themselves are not reproducible - only the split and training are.
seed = 8
np.random.seed(seed)

# %% standardize features and targets to [0, 1]
x_min_max = MinMaxScaler()
y_min_max = MinMaxScaler()
X_minmax = x_min_max.fit_transform(X)
Y_minmax = y_min_max.fit_transform(Y)

# %% Split sample in test and training_set
X_train, X_test, Y_train, Y_test = train_test_split(X_minmax, Y_minmax, test_size=0.10, random_state=42)

#%%
estimator = dropout2_model()
# FIX: previously this trained on the raw, unscaled X/Y (and "validated" on
# the very same data), even though the scaled train/test split above was
# computed for this purpose and the predictions below are inverse-transformed
# with y_min_max. Train on the scaled training split and validate on the
# held-out test split.
estimator.fit(X_train, Y_train, epochs=30, batch_size=124, validation_data=(X_test, Y_test))

# %% predict on the held-out data and map predictions back to original units
Yhat_test = estimator.predict(X_test, batch_size=32)
oY_test = y_min_max.inverse_transform(Y_test)
oY_hat = y_min_max.inverse_transform(Yhat_test)

# %% measured vs. predicted scatter with a y = x reference line
fig, ax = plt.subplots()
ax.scatter(oY_test, oY_hat, edgecolors=(0, 0, 0))
ax.plot([oY_test.min(), oY_test.max()], [oY_test.min(), oY_test.max()], 'k--', lw=4)
ax.set_xlabel('Measured')
ax.set_ylabel('Predicted')
plt.show()
def r2(y_true, y_pred):
    """Return the coefficient of determination, R^2 = 1 - SS_res / SS_tot.

    y_true -- the observed values
    y_pred -- the predicted values
    """
    ss_residual = np.sum((y_true - y_pred) ** 2)
    ss_total = np.sum((y_true - np.mean(y_true)) ** 2)
    return 1 - ss_residual / ss_total
# Report the coefficient of determination on the held-out data.
r2Model = r2(oY_test,oY_hat)
print("R2 Model: ",r2Model)
|
seerjk/reboot06
|
arch_pre_work/mtsleep1.py
|
import thread
from time import sleep, ctime
def loop0():
    # Worker 0 (Python 2): simulates 4 seconds of work, stamping start/end times.
    print 'start loop 0 at: ', ctime()
    sleep(4)
    print 'loop 0 done at: ', ctime()
def loop1():
    # Worker 1 (Python 2): simulates 2 seconds of work, stamping start/end times.
    print 'start loop 1 at: ', ctime()
    sleep(2)
    print 'loop 1 done at: ', ctime()
def main():
    # Spawn both loops as raw threads; the Python 2 'thread' module has no
    # join(), so sleep longer than the slowest worker (4s) to keep the
    # process alive until both have finished.
    print 'starting at: ', ctime()
    thread.start_new_thread(loop0, ())
    thread.start_new_thread(loop1, ())
    sleep(6)
    print 'all DONE at: ', ctime()

if __name__ == '__main__':
    main()
|
robinedwards/neomodel
|
test/test_issue283.py
|
"""
Provides a test case for issue 283 - "Inheritance breaks".
The issue is outlined here: https://github.com/neo4j-contrib/neomodel/issues/283
More information about the same issue at:
https://github.com/aanastasiou/neomodelInheritanceTest
The following example uses a recursive relationship for economy, but the
idea remains the same: "Instantiate the correct type of node at the end of
a relationship as specified by the model"
"""
import os
import neomodel
import datetime
import pytest
import random
# Python 2/3 compatibility: 'basestring' no longer exists on Python 3, so
# alias it to 'str' for the issubclass checks further down.
try:
    basestring
except NameError:
    basestring = str
# Set up a very simple model for the tests
class PersonalRelationship(neomodel.StructuredRel):
    """
    A very simple relationship between two BasePersons, recording only the
    date at which the acquaintance was established.

    Anything that inherits from BasePerson picks this relationship up with
    no further effort.
    """
    on_date = neomodel.DateProperty(default_now=True)
class BasePerson(neomodel.StructuredNode):
    """
    Base class for defining some basic sort of an actor.
    """
    name = neomodel.StringProperty(required=True, unique_index=True)
    friends_with = neomodel.RelationshipTo(
        "BasePerson", "FRIENDS_WITH", model=PersonalRelationship
    )
class TechnicalPerson(BasePerson):
    """
    A technical person specialises BasePerson by adding their expertise.
    """
    expertise = neomodel.StringProperty(required=True)
class PilotPerson(BasePerson):
    """
    A pilot person specialises BasePerson by adding the type of airplane
    they can operate.
    """
    airplane = neomodel.StringProperty(required=True)
class BaseOtherPerson(neomodel.StructuredNode):
    """
    An obviously "wrong" class of actor to befriend BasePersons with.
    """
    car_color = neomodel.StringProperty(required=True)
class SomePerson(BaseOtherPerson):
    """
    Concrete class that simply derives from BaseOtherPerson.

    Used below to verify that connecting it to a BasePerson is rejected, and
    that redefining it raises ClassAlreadyDefined.
    """
    pass
# Test cases
def test_automatic_object_resolution():
    """
    Node objects at the end of relationships are instantiated to their
    corresponding Python object.
    """
    # Create a few entities
    grumpy = TechnicalPerson.get_or_create({"name": "Grumpy", "expertise": "Grumpiness"})[0]
    happy = TechnicalPerson.get_or_create({"name": "Happy", "expertise": "Unicorns"})[0]
    sleepy = TechnicalPerson.get_or_create({"name": "Sleepy", "expertise": "Pillows"})[0]

    # Wire them into a friendship triangle
    grumpy.friends_with.connect(happy)
    happy.friends_with.connect(sleepy)
    sleepy.friends_with.connect(grumpy)

    # Grumpy's friends_with objects must resolve to TechnicalPerson
    # (NOT BasePerson)
    assert type(grumpy.friends_with[0]) is TechnicalPerson

    # Clean up the created nodes
    grumpy.delete()
    happy.delete()
    sleepy.delete()
def test_recursive_automatic_object_resolution():
    """
    Node objects are instantiated to native Python objects, both at the top
    level of returned results and in the case where they are returned within
    lists.
    """
    # Create a few entities
    A = TechnicalPerson.get_or_create({"name":"Grumpier", "expertise":"Grumpiness"})[0]
    B = TechnicalPerson.get_or_create({"name":"Happier", "expertise":"Grumpiness"})[0]
    C = TechnicalPerson.get_or_create({"name":"Sleepier", "expertise":"Pillows"})[0]
    D = TechnicalPerson.get_or_create({"name":"Sneezier", "expertise":"Pillows"})[0]
    # Retrieve mixed results, both at the top level and nested.
    # The query deliberately returns a deeply nested list structure mixing
    # node collections with a primitive string ('Banana').
    L, _ = neomodel.db.cypher_query("MATCH (a:TechnicalPerson) "
                                    "WHERE a.expertise='Grumpiness' "
                                    "WITH collect(a) as Alpha "
                                    "MATCH (b:TechnicalPerson) "
                                    "WHERE b.expertise='Pillows' "
                                    "WITH Alpha, collect(b) as Beta "
                                    "RETURN [Alpha, [Beta, [Beta, ['Banana', "
                                    "Alpha]]]]", resolve_objects = True)
    # Assert that a Node returned deep in a nested list structure is of the
    # correct type
    assert type(L[0][0][0][1][0][0][0][0]) is TechnicalPerson
    # Assert that primitive data types remain primitive data types
    assert issubclass(type(L[0][0][0][1][0][1][0][1][0][0]), basestring)
    # Clean up the created nodes
    A.delete()
    B.delete()
    C.delete()
    D.delete()
def test_validation_with_inheritance_from_db():
    """
    Objects descending from the specified class of a relationship's end-node are
    also perfectly valid to appear as end-node values too
    """
    #Create a few entities
    # Technical Persons
    A = TechnicalPerson.get_or_create({"name":"Grumpy", "expertise":"Grumpiness"})[0]
    B = TechnicalPerson.get_or_create({"name":"Happy", "expertise":"Unicorns"})[0]
    C = TechnicalPerson.get_or_create({"name":"Sleepy", "expertise":"Pillows"})[0]
    # Pilot Persons
    D = PilotPerson.get_or_create({"name":"Porco Rosso", "airplane":"Savoia-Marchetti"})[0]
    E = PilotPerson.get_or_create({"name":"Jack Dalton", "airplane":"Beechcraft Model 18"})[0]
    # TechnicalPersons can befriend PilotPersons and vice-versa and that's fine
    # TechnicalPersons befriend Technical Persons
    A.friends_with.connect(B)
    B.friends_with.connect(C)
    C.friends_with.connect(A)
    # Pilot Persons befriend Pilot Persons
    D.friends_with.connect(E)
    # Technical Persons befriend Pilot Persons
    A.friends_with.connect(D)
    E.friends_with.connect(C)
    # This now means that friends_with of a TechnicalPerson can
    # either be TechnicalPerson or Pilot Person (!NOT basePerson!)
    # (the iteration order of friends_with is not asserted, hence the or-checks)
    assert (type(A.friends_with[0]) is TechnicalPerson) or (type(A.friends_with[0]) is PilotPerson)
    assert (type(A.friends_with[1]) is TechnicalPerson) or (type(A.friends_with[1]) is PilotPerson)
    assert type(D.friends_with[0]) is PilotPerson
    # Clean up the created nodes
    A.delete()
    B.delete()
    C.delete()
    D.delete()
    E.delete()
def test_validation_enforcement_to_db():
    """
    If a connection between wrong types is attempted, raise an exception
    """
    #Create a few entities
    # Technical Persons
    A = TechnicalPerson.get_or_create({"name":"Grumpy", "expertise":"Grumpiness"})[0]
    B = TechnicalPerson.get_or_create({"name":"Happy", "expertise":"Unicorns"})[0]
    C = TechnicalPerson.get_or_create({"name":"Sleepy", "expertise":"Pillows"})[0]
    # Pilot Persons
    D = PilotPerson.get_or_create({"name":"Porco Rosso", "airplane":"Savoia-Marchetti"})[0]
    E = PilotPerson.get_or_create({"name":"Jack Dalton", "airplane":"Beechcraft Model 18"})[0]
    #Some Person (NOT a BasePerson descendant - an invalid friend)
    F = SomePerson(car_color = "Blue").save()
    # TechnicalPersons can befriend PilotPersons and vice-versa and that's fine
    A.friends_with.connect(B)
    B.friends_with.connect(C)
    C.friends_with.connect(A)
    D.friends_with.connect(E)
    A.friends_with.connect(D)
    E.friends_with.connect(C)
    # Trying to befriend a Technical Person with Some Person should raise an
    # exception
    with pytest.raises(ValueError):
        A.friends_with.connect(F)
    # Clean up the created nodes
    A.delete()
    B.delete()
    C.delete()
    D.delete()
    E.delete()
    F.delete()
def test_failed_object_resolution():
    """
    A Neo4j driver node FROM the database contains labels that are unaware to
    neomodel's Database class. This condition raises ClassDefinitionNotFound
    exception.
    """
    # NOTE(review): the docstring mentions ClassDefinitionNotFound but the
    # test expects ModelDefinitionMismatch below - confirm which name is the
    # current one in this neomodel version.
    class RandomPerson(BasePerson):
        randomness = neomodel.FloatProperty(default = random.random)

    # A Technical Person...
    A = TechnicalPerson.get_or_create({"name":"Grumpy", "expertise":"Grumpiness"})[0]
    # A Random Person...
    B = RandomPerson.get_or_create({"name":"Mad Hatter"})[0]
    A.friends_with.connect(B)
    # Simulate the condition where the definition of class RandomPerson is not
    # known yet.
    del neomodel.db._NODE_CLASS_REGISTRY[frozenset(["RandomPerson","BasePerson"])]
    # Now try to instantiate a RandomPerson
    A = TechnicalPerson.get_or_create({"name":"Grumpy", "expertise":"Grumpiness"})[0]
    with pytest.raises(neomodel.exceptions.ModelDefinitionMismatch):
        for some_friend in A.friends_with:
            print(some_friend.name)
    # Clean up the created nodes
    A.delete()
    B.delete()
def test_node_label_mismatch():
    """
    A Neo4j driver node FROM the database contains a superset of the known
    labels.
    """
    class SuperTechnicalPerson(TechnicalPerson):
        superness = neomodel.FloatProperty(default=1.0)

    class UltraTechnicalPerson(SuperTechnicalPerson):
        ultraness = neomodel.FloatProperty(default=3.1415928)

    # Create a TechnicalPerson...
    A = TechnicalPerson.get_or_create({"name":"Grumpy", "expertise":"Grumpiness"})[0]
    # ...that is connected to an UltraTechnicalPerson
    F = UltraTechnicalPerson(name="Chewbaka", expertise="Aarrr wgh ggwaaah").save()
    A.friends_with.connect(F)

    # Forget about the UltraTechnicalPerson
    del neomodel.db._NODE_CLASS_REGISTRY[frozenset(["UltraTechnicalPerson",
                                                    "SuperTechnicalPerson",
                                                    "TechnicalPerson",
                                                    "BasePerson"])]

    # Recall a TechnicalPerson and enumerate its friends.
    # One of them is UltraTechnicalPerson which would be returned as a valid
    # node to a friends_with query but is currently unknown to the node class registry.
    A = TechnicalPerson.get_or_create({"name":"Grumpy", "expertise":"Grumpiness"})[0]
    with pytest.raises(neomodel.exceptions.ModelDefinitionMismatch):
        for some_friend in A.friends_with:
            print(some_friend.name)

    # FIX: clean up the created nodes, as every sibling test in this module
    # does; previously they were left behind in the test database, which can
    # leak state into other get_or_create-based tests.
    A.delete()
    F.delete()
def test_attempted_class_redefinition():
    """
    A neomodel.StructuredNode class is attempted to be redefined.
    """
    def attempt_redefinition():
        # The module scope already defines SomePerson; defining it again must
        # be rejected by neomodel. Its internal structure is irrelevant here.
        class SomePerson(BaseOtherPerson):
            uid = neomodel.UniqueIdProperty()

    with pytest.raises(neomodel.exceptions.ClassAlreadyDefined):
        attempt_redefinition()
|
MalloyPower/parsing-python
|
front-end/testsuite-python-lib/Python-2.2/Lib/distutils/command/build_py.py
|
"""distutils.command.build_py
Implements the Distutils 'build_py' command."""
# created 1999/03/08, Greg Ward
__revision__ = "$Id: build_py.py,v 1.34 2001/12/06 20:59:17 fdrake Exp $"
import sys, string, os
from types import *
from glob import glob
from distutils.core import Command
from distutils.errors import *
class build_py (Command):

    # One-line description shown by 'setup.py --help-commands'.
    description = "\"build\" pure Python modules (copy to build directory)"

    # (long option, short option, help text) triples for the command line.
    user_options = [
        ('build-lib=', 'd', "directory to \"build\" (copy) to"),
        ('compile', 'c', "compile .py to .pyc"),
        ('no-compile', None, "don't compile .py files [default]"),
        ('optimize=', 'O',
         "also compile with optimization: -O1 for \"python -O\", "
         "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
        ('force', 'f', "forcibly build everything (ignore file timestamps)"),
        ]

    # options that take no argument
    boolean_options = ['compile', 'force']
    # '--no-compile' is the boolean inverse of '--compile'
    negative_opt = {'no-compile' : 'compile'}
    def initialize_options (self):
        # Set every option to a neutral default; the real values are filled
        # in by finalize_options() from the 'build' command and the
        # distribution object.
        self.build_lib = None
        self.py_modules = None
        self.package = None
        self.package_dir = None
        self.compile = 0
        self.optimize = 0
        self.force = None
def finalize_options (self):
self.set_undefined_options('build',
('build_lib', 'build_lib'),
('force', 'force'))
# Get the distribution options that are aliases for build_py
# options -- list of packages and list of modules.
self.packages = self.distribution.packages
self.py_modules = self.distribution.py_modules
self.package_dir = self.distribution.package_dir
# Ick, copied straight from install_lib.py (fancy_getopt needs a
# type system! Hell, *everything* needs a type system!!!)
if type(self.optimize) is not IntType:
try:
self.optimize = int(self.optimize)
assert 0 <= self.optimize <= 2
except (ValueError, AssertionError):
raise DistutilsOptionError, "optimize must be 0, 1, or 2"
def run (self):
# XXX copy_file by default preserves atime and mtime. IMHO this is
# the right thing to do, but perhaps it should be an option -- in
# particular, a site administrator might want installed files to
# reflect the time of installation rather than the last
# modification time before the installed release.
# XXX copy_file by default preserves mode, which appears to be the
# wrong thing to do: if a file is read-only in the working
# directory, we want it to be installed read/write so that the next
# installation of the same module distribution can overwrite it
# without problems. (This might be a Unix-specific issue.) Thus
# we turn off 'preserve_mode' when copying to the build directory,
# since the build directory is supposed to be exactly what the
# installation will look like (ie. we preserve mode when
# installing).
# Two options control which modules will be installed: 'packages'
# and 'py_modules'. The former lets us work with whole packages, not
# specifying individual modules at all; the latter is for
# specifying modules one-at-a-time. Currently they are mutually
# exclusive: you can define one or the other (or neither), but not
# both. It remains to be seen how limiting this is.
# Dispose of the two "unusual" cases first: no pure Python modules
# at all (no problem, just return silently), and over-specified
# 'packages' and 'py_modules' options.
if not self.py_modules and not self.packages:
return
if self.py_modules and self.packages:
raise DistutilsOptionError, \
"build_py: supplying both 'packages' and 'py_modules' " + \
"options is not allowed"
# Now we're down to two cases: 'py_modules' only and 'packages' only.
if self.py_modules:
self.build_modules()
else:
self.build_packages()
self.byte_compile(self.get_outputs(include_bytecode=0))
# run ()
def get_package_dir (self, package):
"""Return the directory, relative to the top of the source
distribution, where package 'package' should be found
(at least according to the 'package_dir' option, if any)."""
path = string.split(package, '.')
if not self.package_dir:
if path:
return apply(os.path.join, path)
else:
return ''
else:
tail = []
while path:
try:
pdir = self.package_dir[string.join(path, '.')]
except KeyError:
tail.insert(0, path[-1])
del path[-1]
else:
tail.insert(0, pdir)
return apply(os.path.join, tail)
else:
# Oops, got all the way through 'path' without finding a
# match in package_dir. If package_dir defines a directory
# for the root (nameless) package, then fallback on it;
# otherwise, we might as well have not consulted
# package_dir at all, as we just use the directory implied
# by 'tail' (which should be the same as the original value
# of 'path' at this point).
pdir = self.package_dir.get('')
if pdir is not None:
tail.insert(0, pdir)
if tail:
return apply(os.path.join, tail)
else:
return ''
# get_package_dir ()
def check_package (self, package, package_dir):
# Empty dir name means current directory, which we can probably
# assume exists. Also, os.path.exists and isdir don't know about
# my "empty string means current dir" convention, so we have to
# circumvent them.
if package_dir != "":
if not os.path.exists(package_dir):
raise DistutilsFileError, \
"package directory '%s' does not exist" % package_dir
if not os.path.isdir(package_dir):
raise DistutilsFileError, \
("supposed package directory '%s' exists, " +
"but is not a directory") % package_dir
# Require __init__.py for all but the "root package"
if package:
init_py = os.path.join(package_dir, "__init__.py")
if os.path.isfile(init_py):
return init_py
else:
self.warn(("package init file '%s' not found " +
"(or not a regular file)") % init_py)
# Either not in a package at all (__init__.py not expected), or
# __init__.py doesn't exist -- so don't return the filename.
return
# check_package ()
def check_module (self, module, module_file):
if not os.path.isfile(module_file):
self.warn("file %s (for module %s) not found" %
(module_file, module))
return 0
else:
return 1
# check_module ()
def find_package_modules (self, package, package_dir):
self.check_package(package, package_dir)
module_files = glob(os.path.join(package_dir, "*.py"))
modules = []
setup_script = os.path.abspath(self.distribution.script_name)
for f in module_files:
abs_f = os.path.abspath(f)
if abs_f != setup_script:
module = os.path.splitext(os.path.basename(f))[0]
modules.append((package, module, f))
else:
self.debug_print("excluding %s" % setup_script)
return modules
def find_modules (self):
"""Finds individually-specified Python modules, ie. those listed by
module name in 'self.py_modules'. Returns a list of tuples (package,
module_base, filename): 'package' is a tuple of the path through
package-space to the module; 'module_base' is the bare (no
packages, no dots) module name, and 'filename' is the path to the
".py" file (relative to the distribution root) that implements the
module.
"""
# Map package names to tuples of useful info about the package:
# (package_dir, checked)
# package_dir - the directory where we'll find source files for
# this package
# checked - true if we have checked that the package directory
# is valid (exists, contains __init__.py, ... ?)
packages = {}
# List of (package, module, filename) tuples to return
modules = []
# We treat modules-in-packages almost the same as toplevel modules,
# just the "package" for a toplevel is empty (either an empty
# string or empty list, depending on context). Differences:
# - don't check for __init__.py in directory for empty package
for module in self.py_modules:
path = string.split(module, '.')
package = string.join(path[0:-1], '.')
module_base = path[-1]
try:
(package_dir, checked) = packages[package]
except KeyError:
package_dir = self.get_package_dir(package)
checked = 0
if not checked:
init_py = self.check_package(package, package_dir)
packages[package] = (package_dir, 1)
if init_py:
modules.append((package, "__init__", init_py))
# XXX perhaps we should also check for just .pyc files
# (so greedy closed-source bastards can distribute Python
# modules too)
module_file = os.path.join(package_dir, module_base + ".py")
if not self.check_module(module, module_file):
continue
modules.append((package, module_base, module_file))
return modules
# find_modules ()
def find_all_modules (self):
"""Compute the list of all modules that will be built, whether
they are specified one-module-at-a-time ('self.py_modules') or
by whole packages ('self.packages'). Return a list of tuples
(package, module, module_file), just like 'find_modules()' and
'find_package_modules()' do."""
if self.py_modules:
modules = self.find_modules()
else:
modules = []
for package in self.packages:
package_dir = self.get_package_dir(package)
m = self.find_package_modules(package, package_dir)
modules.extend(m)
return modules
# find_all_modules ()
def get_source_files (self):
modules = self.find_all_modules()
filenames = []
for module in modules:
filenames.append(module[-1])
return filenames
def get_module_outfile (self, build_dir, package, module):
outfile_path = [build_dir] + list(package) + [module + ".py"]
return apply(os.path.join, outfile_path)
def get_outputs (self, include_bytecode=1):
modules = self.find_all_modules()
outputs = []
for (package, module, module_file) in modules:
package = string.split(package, '.')
filename = self.get_module_outfile(self.build_lib, package, module)
outputs.append(filename)
if include_bytecode:
if self.compile:
outputs.append(filename + "c")
if self.optimize > 0:
outputs.append(filename + "o")
return outputs
def build_module (self, module, module_file, package):
if type(package) is StringType:
package = string.split(package, '.')
elif type(package) not in (ListType, TupleType):
raise TypeError, \
"'package' must be a string (dot-separated), list, or tuple"
# Now put the module source file into the "build" area -- this is
# easy, we just copy it somewhere under self.build_lib (the build
# directory for Python source).
outfile = self.get_module_outfile(self.build_lib, package, module)
dir = os.path.dirname(outfile)
self.mkpath(dir)
return self.copy_file(module_file, outfile, preserve_mode=0)
def build_modules (self):
modules = self.find_modules()
for (package, module, module_file) in modules:
# Now "build" the module -- ie. copy the source file to
# self.build_lib (the build directory for Python source).
# (Actually, it gets copied to the directory for this package
# under self.build_lib.)
self.build_module(module, module_file, package)
# build_modules ()
def build_packages (self):
for package in self.packages:
# Get list of (package, module, module_file) tuples based on
# scanning the package directory. 'package' is only included
# in the tuple so that 'find_modules()' and
# 'find_package_tuples()' have a consistent interface; it's
# ignored here (apart from a sanity check). Also, 'module' is
# the *unqualified* module name (ie. no dots, no package -- we
# already know its package!), and 'module_file' is the path to
# the .py file, relative to the current directory
# (ie. including 'package_dir').
package_dir = self.get_package_dir(package)
modules = self.find_package_modules(package, package_dir)
# Now loop over the modules we found, "building" each one (just
# copy it to self.build_lib).
for (package_, module, module_file) in modules:
assert package == package_
self.build_module(module, module_file, package)
# build_packages ()
def byte_compile (self, files):
from distutils.util import byte_compile
prefix = self.build_lib
if prefix[-1] != os.sep:
prefix = prefix + os.sep
# XXX this code is essentially the same as the 'byte_compile()
# method of the "install_lib" command, except for the determination
# of the 'prefix' string. Hmmm.
if self.compile:
byte_compile(files, optimize=0,
force=self.force,
prefix=prefix,
verbose=self.verbose, dry_run=self.dry_run)
if self.optimize > 0:
byte_compile(files, optimize=self.optimize,
force=self.force,
prefix=prefix,
verbose=self.verbose, dry_run=self.dry_run)
# class build_py
|
andriibekker/django-swaps
|
swaps/views.py
|
from django.shortcuts import render_to_response, get_object_or_404
from django.http import HttpResponseRedirect, Http404
from django.template import RequestContext
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import User
from django.contrib.auth.decorators import login_required
from django.views.generic import date_based
from django.conf import settings
from django.db.models import Q
from django.conf import settings
from swaps.models import Offer, Swap
from swaps.forms import OfferForm, ProposeSwapForm, ProposingOfferForm
if "notification" in settings.INSTALLED_APPS:
from notification import models as notification
else:
notification = None
try:
from threadedcomments.models import ThreadedComment
forums = True
except ImportError:
forums = False
@login_required
def offers(request, username=None):
    """List every open offer (state == 1), newest first.

    ``username`` is accepted for URL-pattern compatibility but does not
    filter the queryset.
    """
    open_offers = Offer.objects.filter(state=1).order_by("-offered_time")
    context = {"offers": open_offers}
    return render_to_response("swaps/offers.html", context,
                              context_instance=RequestContext(request))
@login_required
def offer(request, offer_id):
    """Show the detail page for one offer; 404 when the id is unknown."""
    the_offer = get_object_or_404(Offer, id=offer_id)
    context = {"offer": the_offer}
    return render_to_response("swaps/offer.html", context,
                              context_instance=RequestContext(request))
@login_required
def your_offers(request):
    """List the current user's own offers, newest first."""
    own_offers = Offer.objects.filter(
        offerer=request.user).order_by("-offered_time")
    return render_to_response("swaps/your_offers.html", {"offers": own_offers},
                              context_instance=RequestContext(request))
@login_required
def swap(request, swap_id):
    """Show the detail page for one swap; 404 when the id is unknown."""
    the_swap = get_object_or_404(Swap, id=swap_id)
    return render_to_response("swaps/swap.html", {"swap": the_swap},
                              context_instance=RequestContext(request))
@login_required
def proposed_by_you(request):
    """List pending swaps (state 1) that the current user proposed."""
    pending = Swap.objects.filter(
        proposing_offer__offerer=request.user,
        state=1).order_by("-proposed_time")
    return render_to_response("swaps/proposed_by_you.html", {"swaps": pending},
                              context_instance=RequestContext(request))
@login_required
def proposed_to_you(request):
    """List pending swaps (state 1) proposed against the user's offers."""
    pending = Swap.objects.filter(
        responding_offer__offerer=request.user,
        state=1).order_by("-proposed_time")
    return render_to_response("swaps/proposed_to_you.html", {"swaps": pending},
                              context_instance=RequestContext(request))
@login_required
def accepted_swaps(request):
    """List accepted swaps (state 2) where the user is on either side."""
    me = request.user
    accepted = Swap.objects.filter(
        Q(state=2, proposing_offer__offerer=me) |
        Q(state=2, responding_offer__offerer=me)).order_by("-accepted_time")
    return render_to_response("swaps/accepted.html", {"swaps": accepted},
                              context_instance=RequestContext(request))
@login_required
def dead_swaps(request):
    """List dead swaps (state > 3) where the user is on either side."""
    me = request.user
    dead = Swap.objects.filter(
        Q(state__gt=3, proposing_offer__offerer=me) |
        Q(state__gt=3, responding_offer__offerer=me)).order_by("-killed_time")
    return render_to_response("swaps/dead.html", {"swaps": dead},
                              context_instance=RequestContext(request))
@login_required
def new(request):
    """Create a new offer for the current user.

    GET renders an empty form; POST with action == "create" validates the
    offer form plus any configured inline formsets, saves everything, and
    redirects to the user's offer list.
    """
    def is_valid(formsets):
        # Every inline formset must validate for the submission to succeed.
        valid = True
        for name, formset in formsets:
            valid = valid and formset.is_valid()
        return valid
    formsets = []
    if request.method == "POST":
        # .get() avoids a MultiValueDictKeyError when "action" is missing
        # from a malformed/hand-crafted POST payload.
        if request.POST.get("action") == "create":
            for name, formset_class in OfferForm.inlines:
                formsets += [(name, formset_class(request.POST, request.FILES)), ]
            offer_form = OfferForm(request.POST, request.FILES)
            formsets_valid = is_valid(formsets)
            offer_form.inlines = formsets
            if offer_form.is_valid() and formsets_valid:
                offer = offer_form.save(commit=False)
                offer.offerer = request.user
                offer.save()
                # Re-bind the formsets against the saved offer so inline
                # objects get the correct foreign key before saving.
                for name, formset_class in OfferForm.inlines:
                    f = formset_class(request.POST, request.FILES, instance=offer)
                    if f.is_valid():
                        f.save()
                request.user.message_set.create(message=_("Successfully saved offer '%s'") % offer.short_description)
                return HttpResponseRedirect(reverse("offer_list_yours"))
        else:
            offer_form = OfferForm()
    else:
        offer_form = OfferForm()
    return render_to_response("swaps/new_offer.html", {
        "offer_form": offer_form
    }, context_instance=RequestContext(request))
@login_required
def edit_offer(request, offer_id):
    """Edit an existing offer owned by the current user.

    On success, the other party of every pending swap referencing this
    offer is notified, and the user is redirected to the view named by the
    'returnto' query parameter (default: their own offer list).
    """
    def is_valid(formsets):
        # Every inline formset must validate for the update to succeed.
        valid = True
        for name, formset in formsets:
            valid = valid and formset.is_valid()
        return valid
    offer = get_object_or_404(Offer, id=offer_id)
    if offer.offerer != request.user:
        request.user.message_set.create(message="You cannot edit offers that are not yours")
        return HttpResponseRedirect(reverse("offer_list_yours"))
    # .get() avoids a KeyError when 'returnto' is absent from the query
    # string; fall back to the user's own offer list.
    return_to = request.GET.get('returnto', 'offer_list_yours')
    formsets = []
    if request.method == "POST":
        # .get() avoids a MultiValueDictKeyError on a malformed POST.
        if request.POST.get("action") == "update":
            offer_form = OfferForm(request.POST, instance=offer)
            for name, formset_class in OfferForm.inlines:
                formsets += [(name, formset_class(request.POST, request.FILES, instance=offer)), ]
            formsets_valid = is_valid(formsets)
            offer_form.inlines = formsets
            if offer_form.is_valid() and formsets_valid:
                offer = offer_form.save(commit=False)
                offer.save()
                for name, formset_class in OfferForm.inlines:
                    f = formset_class(request.POST, request.FILES, instance=offer)
                    if f.is_valid():
                        f.save()
                if notification:
                    # Warn the other party of each pending swap that one
                    # of its offers changed underneath them.
                    for swap in offer.proposed_swaps.filter(state=1):
                        notification.send([swap.responding_offer.offerer, ], "swaps_proposing_offer_changed",
                            {"creator": request.user,
                             "swap": swap,
                             "proposing_offer": swap.proposing_offer,
                             "responding_offer": swap.responding_offer})
                    for swap in offer.responding_swaps.filter(state=1):
                        notification.send([swap.proposing_offer.offerer, ], "swaps_responding_offer_changed",
                            {"creator": request.user,
                             "swap": swap,
                             "proposing_offer": swap.proposing_offer,
                             "responding_offer": swap.responding_offer})
                request.user.message_set.create(message=_("Successfully updated offer '%s'") % offer.short_description)
                return HttpResponseRedirect(reverse(return_to))
        else:
            offer_form = OfferForm(instance=offer)
            for name, formset_class in OfferForm.inlines:
                formsets += [(name, formset_class(None, None, instance=offer)), ]
    else:
        offer_form = OfferForm(instance=offer)
        for name, formset_class in OfferForm.inlines:
            formsets += [(name, formset_class(None, None, instance=offer)), ]
    offer_form.inlines = formsets
    return render_to_response("swaps/edit_offer.html", {
        "offer_form": offer_form,
        "offer": offer,
    }, context_instance=RequestContext(request))
@login_required
def delete_offer(request, offer_id):
    """Delete an offer owned by the current user (POST only).

    Always returns a redirect: previously a non-POST request fell through
    and returned None, which makes Django raise "view didn't return an
    HttpResponse".
    """
    offer = get_object_or_404(Offer, id=offer_id)
    if offer.offerer != request.user:
        request.user.message_set.create(message="You cannot delete offers that are not yours")
        return HttpResponseRedirect(reverse("offer_list_yours"))
    if request.method == "POST":
        offer.delete()
    # Redirect for non-POST requests too instead of returning None.
    return HttpResponseRedirect(reverse("offer_list_yours"))
@login_required
def cancel_offer(request, offer_id):
    """Cancel an offer owned by the current user (POST only).

    Always returns a redirect: previously a non-POST request fell through
    and returned None, which makes Django raise "view didn't return an
    HttpResponse".
    """
    offer = get_object_or_404(Offer, id=offer_id)
    if offer.offerer != request.user:
        request.user.message_set.create(message="You cannot cancel offers that are not yours")
        return HttpResponseRedirect(reverse("offer_list_yours"))
    if request.method == "POST":
        offer.cancel()
    # Redirect for non-POST requests too instead of returning None.
    return HttpResponseRedirect(reverse("offer_list_yours"))
@login_required
def propose_swap(request, offer_id):
    """Propose a swap against the offer with id 'offer_id'.

    The proposer may either pick one of their existing open offers
    (ProposeSwapForm) or create a brand-new offer inline
    (ProposingOfferForm); whichever form validates produces the Swap.
    """
    offer = get_object_or_404(Offer, id=offer_id)
    if request.method == "POST":
        swap_form = ProposeSwapForm(request.POST)
        offer_form = ProposingOfferForm(request.POST)
        swap = None
        if swap_form.is_valid():
            # An existing offer was chosen; attach it to the target offer.
            swap = swap_form.save(commit=False)
            swap.responding_offer = offer
            swap.save()
        if offer_form.is_valid():
            # A new offer was created inline; build the swap around it.
            # NOTE(review): if BOTH forms validate, this second swap
            # silently replaces the one saved above -- confirm intended.
            proposing_offer = offer_form.save(commit=False)
            proposing_offer.offerer = request.user
            proposing_offer.save()
            swap = Swap(
                proposing_offer=proposing_offer,
                responding_offer=offer)
            swap.save()
        if swap:
            if notification:
                notification.send([offer.offerer,], "swaps_proposal",
                    {"creator": request.user,
                     "swap": swap,
                     "proposing_offer": swap.proposing_offer,
                     "responding_offer": swap.responding_offer})
            return HttpResponseRedirect(reverse("proposed_by_you"))
    else:
        swap_form = ProposeSwapForm()
        # Limit the dropdown to the user's own open offers.
        swap_form.fields['proposing_offer'].queryset = Offer.objects.filter(offerer=request.user, state=1)
        offer_form = ProposingOfferForm()
    # Invalid POSTs fall through here and re-render with bound forms.
    return render_to_response("swaps/propose_swap.html", {
        "offer": offer,
        "swap_form": swap_form,
        "offer_form": offer_form,
    }, context_instance=RequestContext(request))
@login_required
def accept_swap(request, swap_id):
    """Mark a proposed swap as accepted and notify the proposer."""
    # NOTE(review): no check that request.user owns the responding offer
    # before accepting -- presumably enforced elsewhere; confirm.
    the_swap = get_object_or_404(Swap, id=swap_id)
    the_swap.accept()
    the_swap.save()
    if notification:
        payload = {"creator": request.user,
                   "swap": the_swap,
                   "proposing_offer": the_swap.proposing_offer,
                   "responding_offer": the_swap.responding_offer}
        notification.send([the_swap.proposing_offer.offerer],
                          "swaps_acceptance", payload)
    return HttpResponseRedirect(reverse("accepted_swaps"))
@login_required
def reject_swap(request, swap_id):
    """Mark a proposed swap as rejected and notify the proposer."""
    the_swap = get_object_or_404(Swap, id=swap_id)
    the_swap.reject()
    the_swap.save()
    if notification:
        payload = {"creator": request.user,
                   "swap": the_swap,
                   "proposing_offer": the_swap.proposing_offer,
                   "responding_offer": the_swap.responding_offer}
        notification.send([the_swap.proposing_offer.offerer],
                          "swaps_rejection", payload)
    return HttpResponseRedirect(reverse("dead_swaps"))
@login_required
def cancel_swap(request, swap_id):
    """Cancel a proposed swap and notify the responding party."""
    the_swap = get_object_or_404(Swap, id=swap_id)
    the_swap.cancel()
    the_swap.save()
    if notification:
        payload = {"creator": request.user,
                   "swap": the_swap,
                   "proposing_offer": the_swap.proposing_offer,
                   "responding_offer": the_swap.responding_offer}
        notification.send([the_swap.responding_offer.offerer],
                          "swaps_cancellation", payload)
    return HttpResponseRedirect(reverse("dead_swaps"))
|
Koisell/SmartCoffeeMachine
|
python/recognitionService_api/flask_app.py
|
from os import remove
from flask import jsonify, request
from sqlalchemy import create_engine
from sqlalchemy.schema import MetaData
from sqlite3 import dbapi2 as sqlite
from cv2 import imread, COLOR_RGB2GRAY, cvtColor
from cv2.face import LBPHFaceRecognizer_create
from faces_recognizer import Recognizer
# Reflect the existing SQLite schema so the 'User' table can be queried
# without redeclaring its columns here.
engine = create_engine('sqlite+pysqlite:///../sqlite/cafeDB.db', module=sqlite)
meta = MetaData()
meta.reflect(bind=engine)
meta.bind = engine
users_table = meta.tables['User']
# LBPH face recognizer; the pre-trained model is loaded from faces.xml.
recognizer = Recognizer(LBPHFaceRecognizer_create)
recognizer.set_recognizer_xml('faces.xml')
def add_route(app):
    ''' Add routes to a flask app Class. See API swagger doc'''
    @app.route('/users/<id>', methods=['GET'])
    def get_user(id):
        """Return the user with the given numeric id as JSON, or 404."""
        try:
            id = int(id)
        except ValueError:
            return "Invalid id", 400
        result = users_table.select().where(users_table.c.id == id).execute().first()
        if result:
            return jsonify(dict(result.items())), 200
        else:
            return "User not found", 404

    @app.route('/users', methods=["POST"])
    def new_user():
        """Create a user from a JSON body with username/intensity/volume."""
        body = request.get_json()
        try:
            username, intensity, volume = [body[k] for k in ("username", "intensity", "volume")]
        except (KeyError, TypeError):
            return "Incorrect body", 400
        # Because sqlite engine cannot return created tuple, it's not possible to get the id after creation.
        # So we retrieve it.
        id_max = engine.execute("SELECT seq FROM SQLITE_SEQUENCE WHERE name='User'").first()
        # SQLITE_SEQUENCE has no row for 'User' until the first insert, in
        # which case first() returns None -- start numbering at 1 then.
        new_id = (id_max[0] + 1) if id_max else 1
        stmt = users_table.insert().values(id=new_id, username=username, intensity=intensity, volume=volume)
        stmt.execute()
        # Get the create users.
        result = users_table.select().where(users_table.c.id == new_id).execute().first()
        return jsonify(dict(result.items()))

    @app.route('/users/<id>', methods=["PUT"])
    def modify_user(id):
        """Partially update a user; only fields present in the body change."""
        try:
            id = int(id)
        except ValueError:
            return "Invalid id", 400
        find = users_table.select().where(users_table.c.id == id).execute().first()
        if find:
            body = request.get_json()
            username, intensity, volume = [body.get(k) for k in ("username", "intensity", "volume")]
            query = users_table.update().where(users_table.c.id == id)
            if username:
                query = query.values(username=username)
            if intensity:
                query = query.values(intensity=intensity)
            if volume:
                query = query.values(volume=volume)
            query.execute()
            result = users_table.select().where(users_table.c.id == id).execute().first()
            return jsonify(dict(result.items()))
        else:
            return "User not found", 404

    @app.route('/users/<id>', methods=["DELETE"])
    def remove_user(id):
        """Delete a user by numeric id."""
        try:
            id = int(id)
        except ValueError:
            return "Invalid id", 400
        find = users_table.select().where(users_table.c.id == id).execute().first()
        if find:
            users_table.delete().where(users_table.c.id == id).execute()
            return "User succesfuly destroyed"
        else:
            return "User not found", 404

    @app.route('/recognition', methods=["POST"])
    def recognize_faceimage():
        """Run face recognition on an uploaded image; return matched user."""
        if 'image' not in request.files:
            # Was 401 (Unauthorized); a missing upload is a client error (400).
            return "no image", 400
        file = request.files['image']
        if file.filename == '':
            return "Bad Request", 400
        # SECURITY: file.filename is attacker-controlled; saving it verbatim
        # allows path traversal.  Sanitize (e.g. werkzeug's secure_filename)
        # before production use.
        file.save(file.filename)
        id, prob = recognizer.predict(cvtColor(imread(file.filename), COLOR_RGB2GRAY))
        result = users_table.select().where(users_table.c.id == id).execute().first()
        remove(file.filename)
        if result:
            return jsonify(dict(result.items()))
        else:
            return "User unsucessfully recognized", 400
|
ncullen93/pyBN
|
pyBN/classes/_tests/test_bayesnet.py
|
"""
********
UnitTest
BayesNet
********
Method Checks that
assertEqual(a, b) a == b
assertNotEqual(a, b) a != b
assertTrue(x) bool(x) is True
assertFalse(x) bool(x) is False
assertIs(a, b) a is b
assertIsNot(a, b) a is not b
assertIsNone(x) x is None
assertIsNotNone(x) x is not None
assertIn(a, b) a in b
assertNotIn(a, b) a not in b
assertIsInstance(a, b) isinstance(a, b)
assertNotIsInstance(a, b) not isinstance(a, b)
assertAlmostEqual(a, b) round(a-b, 7) == 0
assertNotAlmostEqual(a, b) round(a-b, 7) != 0
assertGreater(a, b) a > b
assertGreaterEqual(a, b) a >= b
assertLess(a, b) a < b
assertLessEqual(a, b) a <= b
assertListEqual(a, b) lists
assertTupleEqual(a, b) tuples
assertSetEqual(a, b) sets or frozensets
assertDictEqual(a, b) dicts
"""
__author__ = """Nicholas Cullen <ncullen.th@dartmouth.edu>"""
import unittest
from pyBN.classes.bayesnet import BayesNet
from pyBN.readwrite.read import read_bn
import os
from os.path import dirname
class BayesNetTestCase(unittest.TestCase):
    """Tests for BayesNet construction and accessors, exercised on both a
    .bif-format network (cancer) and a .bn-format network (cmu alarm)."""

    def setUp(self):
        # Fresh empty network, plus two reference networks loaded from the
        # repository's 'data' directory (three levels above this file).
        self.bn = BayesNet()
        self.dpath = os.path.join(dirname(dirname(dirname(dirname(__file__)))),'data')
        self.bn_bif = read_bn(os.path.join(self.dpath,'cancer.bif'))
        self.bn_bn = read_bn(os.path.join(self.dpath,'cmu.bn'))

    def tearDown(self):
        pass

    def test_isinstance(self):
        self.assertIsInstance(self.bn,BayesNet)

    # --- structure loaded from the .bif file ---

    def test_V_bif(self):
        self.assertListEqual(self.bn_bif.V,
            ['Smoker', 'Pollution', 'Cancer', 'Xray', 'Dyspnoea'])

    def test_E_bif(self):
        self.assertDictEqual(self.bn_bif.E,
            {'Cancer': ['Xray', 'Dyspnoea'],
             'Dyspnoea': [],
             'Pollution': ['Cancer'],
             'Smoker': ['Cancer'],
             'Xray': []})

    def test_F_bif(self):
        self.assertDictEqual(self.bn_bif.F,
            {'Cancer': {'cpt': [0.03, 0.97, 0.05, 0.95, 0.001, 0.999, 0.02, 0.98],
              'parents': ['Pollution', 'Smoker'],
              'values': ['True', 'False']},
             'Dyspnoea': {'cpt': [0.65, 0.35, 0.3, 0.7],
              'parents': ['Cancer'],
              'values': ['True', 'False']},
             'Pollution': {'cpt': [0.9, 0.1], 'parents': [], 'values': ['low', 'high']},
             'Smoker': {'cpt': [0.3, 0.7], 'parents': [], 'values': ['True', 'False']},
             'Xray': {'cpt': [0.9, 0.1, 0.2, 0.8],
              'parents': ['Cancer'],
              'values': ['positive', 'negative']}})

    # --- structure loaded from the .bn file ---

    def test_V_bn(self):
        self.assertListEqual(self.bn_bn.V,
            ['Burglary', 'Earthquake', 'Alarm', 'JohnCalls', 'MaryCalls'])

    def test_E_bn(self):
        self.assertDictEqual(self.bn_bn.E,
            {'Alarm': ['JohnCalls', 'MaryCalls'],
             'Burglary': ['Alarm'],
             'Earthquake': ['Alarm'],
             'JohnCalls': [],
             'MaryCalls': []})

    def test_F_bn(self):
        self.assertDictEqual(self.bn_bn.F,
            {'Alarm': {'cpt': [0.999, 0.001, 0.71, 0.29, 0.06, 0.94, 0.05, 0.95],
              'parents': ['Earthquake', 'Burglary'],
              'values': ['No', 'Yes']},
             'Burglary': {'cpt': [0.999, 0.001], 'parents': [], 'values': ['No', 'Yes']},
             'Earthquake': {'cpt': [0.998, 0.002], 'parents': [], 'values': ['No', 'Yes']},
             'JohnCalls': {'cpt': [0.95, 0.05, 0.1, 0.9],
              'parents': ['Alarm'],
              'values': ['No', 'Yes']},
             'MaryCalls': {'cpt': [0.99, 0.01, 0.3, 0.7],
              'parents': ['Alarm'],
              'values': ['No', 'Yes']}})

    # --- accessor methods, exercised on the .bn network ---

    def test_nodes(self):
        n = list(self.bn_bn.nodes())
        self.assertListEqual(n,
            ['Burglary', 'Earthquake', 'Alarm', 'JohnCalls', 'MaryCalls'])

    def test_cpt(self):
        cpt = list(self.bn_bn.cpt('Alarm'))
        self.assertListEqual(cpt,
            [0.999, 0.001, 0.71, 0.29, 0.06, 0.94, 0.05, 0.95])

    def test_card(self):
        self.assertEqual(self.bn_bn.card('Alarm'),2)

    def test_scope(self):
        self.assertListEqual(self.bn_bn.scope('Alarm'),
            ['Alarm', 'Earthquake', 'Burglary'])

    def test_parents(self):
        self.assertListEqual(self.bn_bn.parents('Alarm'),
            ['Earthquake','Burglary'])

    def test_values(self):
        self.assertListEqual(self.bn_bn.values('Alarm'),['No','Yes'])

    def test_values_idx(self):
        self.assertEqual(self.bn_bn.values('Alarm')[1],'Yes')
# Allow running this test module directly; exit=False keeps the
# interpreter alive after the run (useful from interactive sessions).
if __name__ == '__main__':
    unittest.main(exit=False)
|
shriyanka/daemo-forum
|
crowdsourcing/validators/utils.py
|
from __future__ import unicode_literals
from django.utils.translation import ugettext_lazy as _
from rest_framework.compat import unicode_to_repr
from rest_framework.exceptions import ValidationError
from rest_framework.utils.representation import smart_repr
from csp import settings
class EqualityValidator(object):
    """Serializer-level validator requiring two fields to carry equal values.

    Only runs when the serializer opts in via 'validate_non_fields'.
    """
    message = _('The fields {field_names} must be equal.')
    missing_message = _('This field is required.')

    def __init__(self, fields, message=None):
        self.fields = fields
        self.serializer_field = None
        self.message = message or self.message

    def set_context(self, serializer):
        """Called by the serializer instance prior to validation; captures
        the raw payload and the opt-in flag."""
        self.instance = getattr(serializer, 'instance', None)
        self.initial_data = getattr(serializer, 'initial_data', None)
        self.validate_non_fields = getattr(serializer, 'validate_non_fields', False)

    def __call__(self, *args, **kwargs):
        if not self.validate_non_fields:
            return
        first, second = self.fields[0], self.fields[1]
        if first not in self.initial_data or second not in self.initial_data:
            raise ValidationError("Both fields are required.")
        # Defaults are unreachable after the membership check above; kept
        # for behavioral parity with the original implementation.
        if self.initial_data.get(first, 'Password1') != self.initial_data.get(second, 'Password2'):
            raise ValidationError(
                self.message.format(field_names=', '.join(self.fields)))
class LengthValidator(object):
    """Serializer-level validator enforcing a minimum length on one field.

    Only runs when the serializer opts in via 'validate_non_fields'.
    """
    message = _('Field {field_name} must be at least {length} characters long.')
    missing_message = _('Field {field_name} is required.')

    def __init__(self, field, length, message=None):
        self.field = field
        self.length = length
        self.serializer_field = None
        self.message = message or self.message

    def set_context(self, serializer):
        # Capture the raw payload and the opt-in flag off the serializer.
        self.initial_data = getattr(serializer, 'initial_data', None)
        self.validate_non_fields = getattr(serializer, 'validate_non_fields', False)

    def __call__(self, *args, **kwargs):
        if not self.validate_non_fields:
            return
        if self.field not in self.initial_data:
            raise ValidationError(self.missing_message.format(field_name=self.field))
        if len(self.initial_data[self.field]) < self.length:
            raise ValidationError(
                self.message.format(field_name=self.field, length=self.length))
class RegistrationAllowedValidator(object):
    """Reject sign-ups unless registration is enabled or an admin overrides."""
    message = _('Currently registrations are not allowed.')

    def __call__(self, *args, **kwargs):
        data = args[0]
        # De Morgan of the original: fail only when neither the admin
        # override nor the global REGISTRATION_ALLOWED flag is set.
        if not (data['admin_override'] or settings.REGISTRATION_ALLOWED):
            raise ValidationError(self.message)
|
lukassup/python-cli
|
crud_cli/__init__.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""An example CRUD CLI app."""
from __future__ import (
absolute_import,
print_function,
unicode_literals
)
import argparse
import json
import logging
import sys
# Closed set of values accepted by the -t/--tag option of the
# create/update subcommands.
TAGS = (
    'red',
    'orange',
    'yellow',
    'green',
    'blue',
    'violet',
    'black',
    'white',
    'gray',
)
class CLI(object):
    """A really basic version of the famous Python Click library."""

    # Shared logger used by the subcommand callbacks below.
    log = logging.getLogger(__name__)

    # Arguments common to every subcommand; attached via 'parents=' so each
    # subparser gets its own mutually-exclusive -v/-q flags.  The repeated
    # append_const offsets are summed into a log level in run().
    common_args = argparse.ArgumentParser(add_help=False)
    log_group = common_args.add_mutually_exclusive_group()
    log_group.add_argument(
        '-v',
        '--verbose',
        dest='verbosity',
        default=[logging.INFO],
        action='append_const',
        const=-10,
        help='more verbose',
    )
    log_group.add_argument(
        '-q',
        '--quiet',
        dest='verbosity',
        action='append_const',
        const=10,
        help='less verbose',
    )

    def __init__(self):
        # Root parser; a subcommand is mandatory.
        self.parser = argparse.ArgumentParser()
        self.subparsers = self.parser.add_subparsers(help='subcommands',
                                                     dest='command')
        self.subparsers.required = True

    def command(self, name, *args, **kwargs):
        """Register a function to the command-line interface."""
        def wrapper(f):
            # The function's docstring becomes the subcommand description.
            f.parser = self.subparsers.add_parser(
                name, *args, description=f.__doc__,
                parents=[self.common_args], **kwargs)
            if getattr(f, 'cli_args', None) is not None:
                # Options collected by @option decorators applied below
                # @command (hence already attached to f).
                for fargs, fkwargs in f.cli_args:
                    f.parser.add_argument(*fargs, **fkwargs)
            f.parser.set_defaults(action=f)
            return f
        return wrapper

    def option(self, *args, **kwargs):
        """Register CLI arguments for function.

        Accepts the same arguments as ArgumentParser().add_argument(...)
        """
        def wrapper(f):
            if getattr(f, 'cli_args', None) is None:
                f.cli_args = []
            f.cli_args.append((args, kwargs))
            return f
        return wrapper

    def run(self):
        """Parse arguments and run the default action."""
        args = self.parser.parse_args()
        # init logging: sum the -v/-q offsets, clamped to the valid range
        log_level = max(logging.DEBUG, min(logging.CRITICAL, sum(args.verbosity)))
        debug_on = log_level <= logging.DEBUG
        logging.basicConfig(level=log_level)
        kwargs = dict(vars(args))
        # sanitize excess arguments, obviously there are better ways!
        kwargs.pop('action', None)
        kwargs.pop('command', None)
        kwargs.pop('verbosity', None)
        try:
            # callback action
            args.action(**kwargs)
        except Exception as e:
            self.log.error(e, exc_info=debug_on)
            sys.exit(1)
        sys.exit(0)
# Module-level CLI instance; the decorators below register subcommands on it.
cli = CLI()
@cli.command('create')
@cli.option('name')
@cli.option('-s', '--size', type=int, choices=range(3))
@cli.option('-t', '--tag', choices=TAGS)
def create_command(name, size, tag):
    """Creates a resource."""
    # Announce the action, then echo the resource attributes as JSON.
    cli.log.info('creating resource')
    attrs = {'name': name, 'size': size, 'tag': tag}
    cli.log.info(json.dumps(attrs))
@cli.command('update')
@cli.option('name')
@cli.option('-s', '--size', type=int, choices=range(3))
@cli.option('-t', '--tag', choices=TAGS)
def update_command(name, size=None, tag=None):
"""Updates a resource."""
cli.log.info('updating resource')
cli.log.info(json.dumps({'name': name, 'size': size, 'tag': tag}))
@cli.command('delete')
@cli.option('name')
def delete_command(name):
"""Delete a resource."""
cli.log.info('deleting resource %s' % name)
@cli.command('list')
def list_command():
"""Lists resources."""
cli.log.info('listing all resources')
@cli.command('find')
@cli.option('query')
def find_command(query):
"""Finds resources by query."""
cli.log.info('listing resources matching query %r' % query)
if __name__ == '__main__':
cli.run()
|
PedrosWits/smart-cameras
|
smartcameras/speedcamera.py
|
import uuid
import datetime
import time
import numpy as np
import threading
import vehicle
import azurehook
import json
class SpeedCamera(object):
    """Simulated speed camera that publishes events to a cloud topic.

    While active the camera runs a Poisson arrival process (exponential
    inter-arrival times with parameter ``rate``) and publishes one
    OBSERVATION message per simulated vehicle, plus ACTIVATION and
    DEACTIVATION messages on state changes.
    """
    TOPIC = "speedcamera"
    EVENT_ACTIVATION = "ACTIVATION"
    EVENT_DEACTIVATION = "DEACTIVATION"
    EVENT_VEHICLE = "OBSERVATION"

    def __init__(self, street, city, cloudhook = None, name = None):
        """Create a camera located at street/city.

        cloudhook -- object exposing createTopic/publish; a new AzureHook
                     is created (and the topic created) when omitted.
        name      -- optional human-readable camera name (may be None).
        """
        self.id = str(uuid.uuid4())
        self.street = street
        self.city = city
        self.isActive = False
        self.speedLimit = None
        self.rate = None
        # Set here so toDict()/toJson() work before the first activation.
        self.datetime = datetime.datetime.now()
        # Always bind the attribute (previously unset when name was None).
        self.name = name
        if cloudhook is None:
            self.cloudhook = azurehook.AzureHook()
            self.cloudhook.createTopic(self.TOPIC)
        else:
            # BUG FIX: a caller-supplied hook used to be silently dropped,
            # leaving self.cloudhook unset and crashing on the first
            # publish. The injected hook is assumed to be configured
            # (topic already created) by its owner.
            self.cloudhook = cloudhook

    def relocate(self, street, city = None):
        """Move the camera to a new street (and optionally a new city)."""
        self.street = street
        if(city is not None):
            self.city = city

    # Most commonly executes on its own thread
    def activate(self, speedLimit, rate):
        """Run the observation loop until deactivate() is called.

        Blocks the calling thread; see activateInNewThread().
        Raises EnvironmentError if the camera is already active.
        """
        if(self.isActive is True):
            raise EnvironmentError('Speed camera is already active: deactivate first.')
        self.speedLimit = speedLimit
        self.rate = rate
        self.datetime = datetime.datetime.now()
        self.isActive = True
        # Inform Azure of activated camera
        self.__notifyCloudOfSelf()
        # Event representing the passing of the next vehicle
        self.nextVehicle = threading.Event()
        # Loop until deactivate is called
        # (preferably from a separate thread/process!!!!)
        while self.isActive:
            nextArrival = self.__genNextArrival()
            self.nextVehicle.wait(timeout=nextArrival)
            # BUG FIX: only publish when we are still active; waking up via
            # deactivate() used to emit one spurious OBSERVATION event.
            if self.isActive:
                self.__onObservedVehicle()
        # End of Loop
        self.datetime = datetime.datetime.now()
        self.__notifyCloudOfSelf()

    # Preferably called from a separate thread
    def deactivate(self):
        """Stop the observation loop started by activate()."""
        if not self.isActive:
            raise ValueError("Camera is not active")
        self.isActive = False
        self.nextVehicle.set()

    def toDict(self):
        """Serializable snapshot of the camera state."""
        return {"id" : self.id,
                "street" : self.street,
                "city" : self.city,
                "rate" : self.rate,
                "speedLimit" : self.speedLimit,
                "isActive" : str(self.isActive),
                "timestamp" : datetimeToTimestamp(self.datetime)}

    def toJson(self):
        """JSON rendering of toDict()."""
        return json.dumps(self.toDict(), indent = 4, sort_keys = True)

    ## Helping/Private methods
    ################################################
    def __genNextArrival(self):
        # Exponential inter-arrival time (Poisson process with self.rate).
        if(self.rate is None):
            raise ValueError("Rate is undefined")
        return np.random.exponential(1./self.rate)

    def __notifyCloudOfSelf(self):
        # Publish an ACTIVATION/DEACTIVATION event with a camera snapshot.
        dic = {}
        if self.isActive:
            dic['event'] = self.EVENT_ACTIVATION
        else:
            dic['event'] = self.EVENT_DEACTIVATION
        dic['camera'] = self.toDict()
        json_string = json.dumps(dic, indent = 4, sort_keys = False)
        self.cloudhook.publish(self.TOPIC, json_string, extra = {'event' : dic['event']})

    def __notifyCloudOfVehicle(self, vehicle):
        # Publish an OBSERVATION event for one simulated vehicle.
        dic = {}
        dic['event'] = self.EVENT_VEHICLE
        dic['vehicle'] = vehicle.toDict()
        dic['camera'] = self.toDict()
        dic['timestamp'] = str(datetimeToTimestamp(datetime.datetime.now()))
        json_string = json.dumps(dic, indent = 4, sort_keys = True)
        self.cloudhook.publish(self.TOPIC, json_string,
                               extra = {'event' : dic['event'],
                                        'isSpeeding' : dic['vehicle']['isSpeeding'],
                                        'isPriority' : dic['vehicle']['isPriority']})

    def __onObservedVehicle(self):
        aVehicle = vehicle.NormalVehicle(self.speedLimit)
        self.__notifyCloudOfVehicle(aVehicle)
##########################################################################################
##########################################################################################
##########################################################################################
#
# Helping Functions
#
##########################################################################################
##########################################################################################
##########################################################################################
def datetimeToTimestamp(dt):
    """Convert a naive datetime to seconds since the Unix epoch."""
    epoch = datetime.datetime(1970, 1, 1)
    elapsed = dt - epoch
    return elapsed.total_seconds()
## Global "factory" functions
def activateInNewThread(camera, speedLimit, rate, daemon = True):
    """Start camera.activate(speedLimit, rate) on a new thread and return it."""
    worker = threading.Thread(target=camera.activate, args=(speedLimit, rate))
    worker.daemon = daemon
    worker.start()
    return worker
|
IT-PM-OpenAdaptronik/Webapp
|
apps/register/forms.py
|
""" License
MIT License
Copyright (c) 2017 OpenAdaptronik
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from django.contrib.auth import get_user_model
from django.contrib.auth.forms import UserCreationForm
from rattler.forms import BaseForm
from captcha.fields import ReCaptchaField
class RegisterForm(UserCreationForm, BaseForm):
    """ User registration form.

    Extends the django.contrib.auth.forms.UserCreationForm and rattler.forms.BaseForm.
    Adds a Google reCAPTCHA challenge on top of the standard
    username/email/password registration fields.
    """
    # reCAPTCHA widget; empty label so only the captcha itself is rendered.
    captcha = ReCaptchaField(
        label="",
    )
    class Meta:
        """ Meta informations.
        """
        # Bind the form to the project's configured user model.
        model = get_user_model()
        # UserCreationForm.Meta.fields provides the username field;
        # email and the two password fields are appended here.
        fields = UserCreationForm.Meta.fields + ('email', 'password1', 'password2')
|
WittscraftStudios/business-card-raytracer
|
python/card.py
|
#!/usr/bin/env python3
import sys
from math import sqrt, hypot
from random import randint
from array import array
class Vector3d:
    '''Three-component float vector ported from the C++ card raytracer.

    C++ reference being translated:

        struct v{
          f x,y,z;                 // three float components
          v operator+(v r){return v(x+r.x,y+r.y,z+r.z);}   // add
          v operator*(f r){return v(x*r,y*r,z*r);}         // scale
          f operator%(v r){return x*r.x+y*r.y+z*r.z;}      // dot product
          v operator^(v r){return v(y*r.z-z*r.y,z*r.x-x*r.z,x*r.y-y*r.x);} // cross
          v operator!(){return *this*(1/sqrt(*this%*this));} // normalize
        };

    Python mapping: ``+`` add, ``*`` scalar scale, ``@`` dot product
    (scalar), ``^`` cross product, ``_normalisation()`` for C++ ``!``.

    BUG FIXES vs. the previous version: arithmetic methods returned the
    undefined name ``Vector``; unquoted self-referential annotations raised
    NameError at class-creation time; ``__matmul__`` had a ``other.y`` typo
    and returned a vector instead of the scalar dot product the callers
    (and the C++ ``%`` operator) expect; ``_normalisation`` called a
    non-existent ``__dot__``; ``__xor__`` was missing although the script
    uses ``^`` for cross products.
    '''
    typecode = 'd'
    __slots__ = ('x', 'y', 'z')

    def __init__(self, x: float, y: float, z: float):
        self.x = float(x)
        self.y = float(y)
        self.z = float(z)

    @classmethod
    def frombytes(cls, octets):
        """Rebuild a vector from the bytes produced by __bytes__."""
        typecode = chr(octets[0])
        memv = memoryview(octets[1:]).cast(typecode)
        return cls(*memv)

    def __iter__(self):
        return (i for i in (self.x, self.y, self.z))

    def __repr__(self):
        class_name = type(self).__name__
        return '{}({!r}, {!r}, {!r})'.format(class_name, *self)

    def __str__(self):
        return str(tuple(self))

    def __bytes__(self):
        return (bytes([ord(self.typecode)]) +
                bytes(array(self.typecode, self)))

    def __eq__(self, other: 'Vector3d'):
        return tuple(self) == tuple(other)

    def __abs__(self):
        # Euclidean norm, nested hypot keeps intermediate overflow at bay.
        return hypot(hypot(self.x, self.y), self.z)

    def __bool__(self):
        return bool(abs(self))

    def __add__(self, other: 'Vector3d'):
        return Vector3d(self.x + other.x, self.y + other.y, self.z + other.z)

    def __mul__(self, scale: float):
        """Scalar scaling (C++ operator*)."""
        return Vector3d(self.x * scale, self.y * scale, self.z * scale)

    def __matmul__(self, other: 'Vector3d') -> float:
        """Dot product (C++ operator%) -- returns a scalar."""
        return self.x * other.x + self.y * other.y + self.z * other.z

    def _cross_product(self, other: 'Vector3d'):
        return Vector3d(
            self.y * other.z - self.z * other.y,
            self.z * other.x - self.x * other.z,
            self.x * other.y - self.y * other.x
        )

    # ``a ^ b`` mirrors the C++ cross-product operator used by the renderer.
    __xor__ = _cross_product

    def _normalisation(self):
        """Unit-length copy of this vector (C++ operator!)."""
        return self * (1 / sqrt(self @ self))
'''
//The set of sphere positions describing the world.
//Those integers are in fact bit vectors.
i G[]={247570,280596,280600,249748,18578,18577,231184,16,16};
/*
16       1
16       1
231184   111 111 1
18577    1 1 1 1 1
18578    1 1 1 1 1
249748   1111 11111 1 1
280600   1 1 1 11
280596   1 1 1 1 1
247570   1111 111 1 1
*/
'''
# Each integer encodes one 19-column row of sphere positions as a bitmask;
# G[0] is the bottom row of the picture above.
G = [247570, 280596, 280600, 249748, 18578, 18577, 231184, 16, 16]
def random() -> float:
    """Uniform random float in the closed range [0, 1].

    Mirrors the C helper ``f R(){return(f)rand()/RAND_MAX;}``.
    Python's ``random.random`` excludes 1.0, so an inclusive integer
    draw is scaled down instead.
    """
    resolution = 1000000
    return randint(0, resolution) / float(resolution)
def intersection_test(o: 'Vector3d', d: 'Vector3d', t=None, n=None):
    '''Ray/world intersection test (port of the C function ``T``).

    Casts the ray with origin ``o`` and direction ``d`` against the floor
    plane z=0 and the unit spheres encoded in ``G``.

    Returns the tuple ``(m, t, n)``:
      m -- 2 if a sphere was hit, 1 if only the floor plane was hit
           (ray going downward), 0 if nothing was hit (ray going upward)
      t -- distance to the closest hit (1e9 when m == 0)
      n -- unit surface normal at the hit point, or None when m == 0

    The C original returned ``t`` and ``n`` through reference parameters,
    which Python cannot do; the ``t``/``n`` arguments are accepted only
    for signature compatibility and are ignored.

    BUG FIXES vs. the previous version: ``t = 10^9`` was bitwise XOR (3),
    not 1e9; ``n = !(p + d + t)`` was invalid syntax (C++ normalize) and
    used ``+ t`` instead of ``* t``; the loop bounds ``range(19,0,-1)`` /
    ``range(9,0,-1)`` missed index 0 and indexed ``G[9]`` out of range
    (C ``for(i k=19;k--;)`` counts 18..0).
    '''
    t = 1e9
    m = 0
    n = None
    # Distance to the floor plane (C: f p=-o.z/d.z).
    p = -o.z / d.z
    if .01 < p:
        t = p
        n = Vector3d(0., 0., 1.)
        m = 1
    # The world is encoded in G: 9 rows x 19 columns of potential spheres.
    for k in range(18, -1, -1):
        for j in range(8, -1, -1):
            if G[j] & (1 << k):
                # Sphere at column k, row j; centre offset is (-k, 0, -j-4).
                # Component math avoids depending on Vector3d operators.
                cx = o.x - k
                cy = o.y
                cz = o.z - j - 4
                b = cx * d.x + cy * d.y + cz * d.z
                c = cx * cx + cy * cy + cz * cz - 1
                q = b * b - c
                # Does the ray hit the sphere?
                if q > 0:
                    s = -b - sqrt(q)
                    if .01 < s < t:
                        # Closest hit so far: record distance and the unit
                        # surface normal normalise(p + d*t).
                        t = s
                        nx = cx + d.x * t
                        ny = cy + d.y * t
                        nz = cz + d.z * t
                        inv = 1 / sqrt(nx * nx + ny * ny + nz * nz)
                        n = Vector3d(nx * inv, ny * inv, nz * inv)
                        m = 2
    return m, t, n
def sample(o: Vector3d, d: Vector3d) -> Vector3d:
    """
    // (S)ample the world and return the pixel color for
    // a ray passing by point o (Origin) and d (Direction)
    v S(v o,v d){
      f t;
      v n;
      //Search for an intersection ray Vs World.
      i m=T(o,d,t,n);
      if(!m) // m==0
        //No sphere found and the ray goes upward: Generate a sky color
        return v(.7,.6,1)*pow(1-d.z,4);
      //A sphere was maybe hit.
      v h=o+d*t,                    // h = intersection coordinate
      l=!(v(9+R(),9+R(),16)+h*-1),  // 'l' = direction to light (with random delta for soft-shadows).
      r=d+n*(n%d*-2);               // r = The half-vector
      //Calculated the lambertian factor
      f b=l%n;
      //Calculate illumination factor (lambertian coefficient > 0 or in shadow)?
      if(b<0||T(h,l,t,n))
        b=0;
      // Calculate the color 'p' with diffuse and specular component
      f p=pow(l%r*(b>0),99);
      if(m&1){ //m == 1
        h=h*.2; //No sphere was hit and the ray was going downward: Generate a floor color
        return((i)(ceil(h.x)+ceil(h.y))&1?v(3,1,1):v(3,3,3))*(b*.2+.1);
      }
      //m == 2 A sphere was hit. Cast an ray bouncing from the sphere surface.
      return v(p,p,p)+S(h,r)*.5; //Attenuate color by 50% since it is bouncing (* .5)
    }
    """
    # NOTE(review): this body is a broken literal translation of the C code
    # above and cannot run as-is:
    #   * `t` and `n` are used before assignment (the C out-parameters were
    #     never mapped to return values);
    #   * `!(...)` is not Python syntax (C++ operator! = normalize);
    #   * `(1-d.z ** 4)` has the wrong precedence -- C computes pow(1-d.z, 4);
    #   * the `cond ? a : b` ternary below is C syntax, not Python;
    #   * `ceil` is never imported (only sqrt/hypot come from math).
    # Search for an intersection ray vs. world
    m = intersection_test(o, d, t, n)
    if not m:
        # No sphere was found and the ray goes upward. Generate a sky colour
        return Vector3d(.7, .6, 1.) * (1-d.z ** 4)
    # A sphere was maybe hit
    h = o + d * t  # h = intersection co-ordinate
    l = !(Vector3d(9 + random(), 9 + random(), 16) + h * -1)  # l = direction to light (with random delta for soft-shadows)
    r = d + n * (n @ d * -2)  # r = The half-vector
    b = l @ n  # Calculate the lambertian factor
    if b < 0 or intersection_test(h, l, t, n):
        # Calculate the illumination factor (lambertian coefficient > 0 or in shadow) ?
        b = 0
    p = pow(l @ r * (b > 0), 99)  # Calculate the colour `p` with diffuse and specular component
    if m & 1:
        h *= .2  # No sphere was hit and the ray was going downward. Generate a floor colour
        return (ceil(h.x) + ceil(h.y)) & 1 ? Vector3d(3., 1., 1.) : Vector3d(3., 3., 3.) * (b * .2 + .1)
    # m == 2 -> No sphere was hit. Cast a ray bounding from the sphere surface.
    return Vector3d(p, p, p) + sample(h, r) * 0.5  # Attenuate colour by 50% since it is bouncing (* .5)
if __name__ == '__main__':
    """
    i main(){
      printf("P6 512 512 255 "); // The PPM Header is issued
      // The '!' are for normalizing each vectors with ! operator.
      v g=!v(-6,-16,0),       // Camera direction
        a=!(v(0,0,1)^g)*.002, // Camera up vector...Seem Z is pointing up :/ WTF !
        b=!(g^a)*.002,        // The right vector, obtained via traditional cross-product
        c=(a+b)*-256+g;       // WTF ? See https://news.ycombinator.com/item?id=6425965 for more.
      for(i y=512;y--;)    //For each column
      for(i x=512;x--;){   //For each pixel in a line
        //Reuse the vector class to store not XYZ but a RGB pixel color
        v p(13,13,13);     // Default pixel color is almost pitch black
        //Cast 64 rays per pixel (For blur (stochastic sampling) and soft-shadows.
        for(i r=64;r--;){
          // The delta to apply to the origin of the view (For Depth of View blur).
          v t=a*(R()-.5)*99+b*(R()-.5)*99; // A little bit of delta up/down and left/right
          // Set the camera focal point v(17,16,8) and Cast the ray
          // Accumulate the color returned in the p variable
          p=S(v(17,16,8)+t, //Ray Origin
              !(t*-1+(a*(R()+x)+b*(y+R())+c)*16) // Ray Direction with random deltas
                                                 // for stochastic sampling
              )*3.5+p; // +p for color accumulation
        }
        printf("%c%c%c",(i)p.x,(i)p.y,(i)p.z);
      }
    }
    """
    # NOTE(review): this block is a broken literal translation of the C code
    # above and cannot run as-is:
    #   * `!vector` is not Python syntax (C++ operator! = normalize);
    #   * the accumulator is created as `pixel` but updated/read as `p`;
    #   * '{!c}{!c}{!c}' is written literally -- nothing formats the RGB
    #     components into bytes like the C printf("%c%c%c", ...), and the
    #     write belongs inside the per-pixel loop, not after both loops.
    sys.stdout.write('P6 512 512 255 ')
    # The `!` are for normalising each vectors with `!` operator
    g = !Vector3d(-6., -16., 0.)
    a = !(Vector3d(0., 0., 1.) ^ g) * .002
    b = !(g ^ a) * .002
    c = (a + b) * -256 + g
    for y in range(512, 0, -1):
        for x in range(512, 0, -1):
            pixel = Vector3d(13., 13., 13.)  # Reuse the vector class to store RGB values
            # Cast 64 rays per pixel
            for r in range(64, 0, -1):
                t = a * (random() - .5) * 99 + b * (random() - .5) * 99
                p = sample(
                    Vector3d(17., 16., 8.) + t,  # Ray origin
                    # Ray direction with random deltas for stochastic sampling
                    !(t * -1 + (a * (random() + x) + b * (y + random()) + c ) * 16)
                ) * 3.5 + p  # +p for colour accumulation
    sys.stdout.write('{!c}{!c}{!c}')
|
ikarutoko/Tradercoin
|
Contrib/Spendfrom/spendfrom.py
|
#!/usr/bin/env python
#
# Use the raw transactions API to spend bitcoins received on particular addresses,
# and send any change back to that same address.
#
# Example usage:
# spendfrom.py # Lists available funds
# spendfrom.py --from=ADDRESS --to=ADDRESS --amount=11.00
#
# Assumes it will talk to a bitcoind or Bitcoin-Qt running
# on localhost.
#
# Depends on jsonrpc
#
from decimal import *
import getpass
import math
import os
import os.path
import platform
import sys
import time
from jsonrpc import ServiceProxy, json
# Minimum fee threshold used by the change/fee sanity checks below.
BASE_FEE = Decimal("0.001")

def check_json_precision():
    """Verify the json library round-trips BTC amounts without losing precision.

    Raises RuntimeError when a float/JSON round trip corrupts the satoshi
    value of a known amount.
    """
    amount = Decimal("20000000.00000003")
    round_tripped = json.loads(json.dumps(float(amount)))
    satoshis = int(round_tripped * 1.0e8)
    if satoshis != 2000000000000003:
        raise RuntimeError("JSON encode/decode loses precision")
def determine_db_dir():
    """Return the default location of the bitcoin data directory."""
    system = platform.system()
    if system == "Darwin":
        return os.path.expanduser("~/Library/Application Support/Bitcoin/")
    if system == "Windows":
        return os.path.join(os.environ['APPDATA'], "Bitcoin")
    # Everything else (Linux, BSD, ...) uses the dot-directory convention.
    return os.path.expanduser("~/.bitcoin")
def read_bitcoin_config(dbdir):
    """Read the bitcoin.conf file from dbdir, returns dictionary of settings"""
    from ConfigParser import SafeConfigParser  # NOTE(review): Python 2 module name
    class FakeSecHead(object):
        # bitcoin.conf has no [section] headers, but ConfigParser requires
        # one; this file-like wrapper injects a fake "[all]" header before
        # the real content and strips trailing "#" comments from each line.
        def __init__(self, fp):
            self.fp = fp
            self.sechead = '[all]\n'
        def readline(self):
            if self.sechead:
                # Emit the fake section header exactly once.
                try: return self.sechead
                finally: self.sechead = None
            else:
                s = self.fp.readline()
                if s.find('#') != -1:
                    s = s[0:s.find('#')].strip() +"\n"
                return s
    config_parser = SafeConfigParser()
    config_parser.readfp(FakeSecHead(open(os.path.join(dbdir, "bitcoin.conf"))))
    return dict(config_parser.items("all"))
def connect_JSON(config):
    """Connect to a bitcoin JSON-RPC server.

    Builds the connection URL from rpcuser/rpcpassword/rpcport in `config`
    (defaulting the port by testnet flag), verifies the node's testnet
    setting matches, and returns the ServiceProxy. Exits the process on
    connection failure or testnet mismatch.
    """
    testnet = config.get('testnet', '0')
    testnet = (int(testnet) > 0)  # 0/1 in config file, convert to True/False
    if not 'rpcport' in config:
        config['rpcport'] = 16452 if testnet else 6452
    connect = "http://%s:%s@127.0.0.1:%s"%(config['rpcuser'], config['rpcpassword'], config['rpcport'])
    try:
        result = ServiceProxy(connect)
        # ServiceProxy is lazy-connect, so send an RPC command mostly to catch connection errors,
        # but also make sure the bitcoind we're talking to is/isn't testnet:
        if result.getmininginfo()['testnet'] != testnet:
            sys.stderr.write("RPC server at "+connect+" testnet setting mismatch\n")
            sys.exit(1)
        return result
    except SystemExit:
        # BUG FIX: the bare `except:` used to catch the sys.exit(1) above
        # and print a bogus "Error connecting" message; let the exit
        # propagate unchanged.
        raise
    except Exception:
        sys.stderr.write("Error connecting to RPC server at "+connect+"\n")
        sys.exit(1)
def unlock_wallet(bitcoind):
    """Ensure the wallet is unlocked, prompting for the passphrase if needed.

    Returns True when the wallet is not encrypted or is currently unlocked;
    False when the entered passphrase did not unlock it.
    """
    info = bitcoind.getinfo()
    if 'unlocked_until' not in info:
        return True # wallet is not encrypted
    t = int(info['unlocked_until'])
    if t <= time.time():
        try:
            passphrase = getpass.getpass("Wallet is locked; enter passphrase: ")
            # Unlock just long enough for the caller to sign a transaction.
            bitcoind.walletpassphrase(passphrase, 5)
        except Exception:
            # BUG FIX: a bare `except:` also swallowed KeyboardInterrupt /
            # SystemExit raised while prompting; catch only real errors.
            sys.stderr.write("Wrong passphrase\n")
    info = bitcoind.getinfo()
    return int(info['unlocked_until']) > time.time()
def list_available(bitcoind):
    """Group the node's unspent outputs by receiving address.

    Returns {address: {"total": Decimal, "outputs": [unspent...],
    "account": str}} for every pay-to-pubkey-hash / pay-to-script-hash
    unspent output known to the node.
    """
    address_summary = dict()
    # Map address -> account label, used to annotate the summary below.
    address_to_account = dict()
    for info in bitcoind.listreceivedbyaddress(0):
        address_to_account[info["address"]] = info["account"]
    unspent = bitcoind.listunspent(0)
    for output in unspent:
        # listunspent doesn't give addresses, so:
        rawtx = bitcoind.getrawtransaction(output['txid'], 1)
        vout = rawtx["vout"][output['vout']]
        pk = vout["scriptPubKey"]
        # This code only deals with ordinary pay-to-bitcoin-address
        # or pay-to-script-hash outputs right now; anything exotic is ignored.
        if pk["type"] != "pubkeyhash" and pk["type"] != "scripthash":
            continue
        address = pk["addresses"][0]
        if address in address_summary:
            address_summary[address]["total"] += vout["value"]
            address_summary[address]["outputs"].append(output)
        else:
            address_summary[address] = {
                "total" : vout["value"],
                "outputs" : [output],
                "account" : address_to_account.get(address, "")
                }
    return address_summary
def select_coins(needed, inputs):
    """Greedy coin selection.

    Takes inputs in order until their combined amount covers `needed`.
    Returns (selected_outputs, change) where change may be negative if
    the inputs were insufficient.
    """
    # Feel free to improve this, this is good enough for my simple needs:
    selected = []
    gathered = Decimal("0.0")
    for coin in inputs:
        if gathered >= needed:
            break
        selected.append({"txid": coin["txid"], "vout": coin["vout"]})
        gathered += coin["amount"]
    return (selected, gathered - needed)
def create_tx(bitcoind, fromaddresses, toaddress, amount, fee):
    """Build and sign a raw transaction sending `amount` to `toaddress`.

    Inputs are gathered from `fromaddresses`; change above BASE_FEE is sent
    back to the last from-address. Returns the signed transaction hex.
    Exits the process if funds are insufficient or signing is incomplete.
    """
    all_coins = list_available(bitcoind)
    total_available = Decimal("0.0")
    needed = amount+fee
    potential_inputs = []
    for addr in fromaddresses:
        if addr not in all_coins:
            continue
        potential_inputs.extend(all_coins[addr]["outputs"])
        total_available += all_coins[addr]["total"]
    if total_available < needed:
        sys.stderr.write("Error, only %f BTC available, need %f\n"%(total_available, needed));
        sys.exit(1)
    #
    # Note:
    # Python's json/jsonrpc modules have inconsistent support for Decimal numbers.
    # Instead of wrestling with getting json.dumps() (used by jsonrpc) to encode
    # Decimals, I'm casting amounts to float before sending them to bitcoind.
    #
    outputs = { toaddress : float(amount) }
    (inputs, change_amount) = select_coins(needed, potential_inputs)
    if change_amount > BASE_FEE: # don't bother with zero or tiny change
        change_address = fromaddresses[-1]
        if change_address in outputs:
            outputs[change_address] += float(change_amount)
        else:
            outputs[change_address] = float(change_amount)
    rawtx = bitcoind.createrawtransaction(inputs, outputs)
    signed_rawtx = bitcoind.signrawtransaction(rawtx)
    if not signed_rawtx["complete"]:
        sys.stderr.write("signrawtransaction failed\n")
        sys.exit(1)
    txdata = signed_rawtx["hex"]
    return txdata
def compute_amount_in(bitcoind, txinfo):
    """Sum the values of all inputs of a decoded transaction.

    Looks up each input's previous transaction on the node to find the
    value of the output being spent.
    """
    total = Decimal("0.0")
    for txin in txinfo['vin']:
        prev_tx = bitcoind.getrawtransaction(txin['txid'], 1)
        spent_output = prev_tx['vout'][txin['vout']]
        total += spent_output['value']
    return total
def compute_amount_out(txinfo):
    """Sum the values of all outputs of a decoded transaction."""
    return sum((txout['value'] for txout in txinfo['vout']), Decimal("0.0"))
def sanity_test_fee(bitcoind, txdata_hex, max_fee):
    """Abort (sys.exit(1)) if the transaction's implied fee looks wrong.

    Decodes txdata_hex via the node and checks that the fee
    (inputs - outputs) does not exceed max_fee, and that transactions over
    1000 bytes or moving tiny amounts pay at least BASE_FEE.
    """
    class FeeError(RuntimeError):
        pass
    try:
        txinfo = bitcoind.decoderawtransaction(txdata_hex)
        total_in = compute_amount_in(bitcoind, txinfo)
        total_out = compute_amount_out(txinfo)
        # BUG FIX: `fee` was referenced below but never assigned, so these
        # checks raised NameError (uncaught) instead of validating the fee.
        fee = total_in - total_out
        if fee > max_fee:
            raise FeeError("Rejecting transaction, unreasonable fee of "+str(fee))
        tx_size = len(txdata_hex)/2
        kb = tx_size/1000 # integer division rounds down
        if kb > 1 and fee < BASE_FEE:
            raise FeeError("Rejecting no-fee transaction, larger than 1000 bytes")
        if total_in < 0.01 and fee < BASE_FEE:
            raise FeeError("Rejecting no-fee, tiny-amount transaction")
        # Exercise for the reader: compute transaction priority, and
        # warn if this is a very-low-priority transaction
    except FeeError as err:
        sys.stderr.write((str(err)+"\n"))
        sys.exit(1)
def main():
    """Command-line entry point.

    With no --amount: list the available funds per address.
    With --from/--to/--amount: build, sign and (unless --dry_run)
    broadcast the transaction.
    """
    import optparse
    parser = optparse.OptionParser(usage="%prog [options]")
    parser.add_option("--from", dest="fromaddresses", default=None,
                      help="addresses to get bitcoins from")
    parser.add_option("--to", dest="to", default=None,
                      help="address to get send bitcoins to")
    parser.add_option("--amount", dest="amount", default=None,
                      help="amount to send")
    parser.add_option("--fee", dest="fee", default="0.0",
                      help="fee to include")
    parser.add_option("--datadir", dest="datadir", default=determine_db_dir(),
                      help="location of bitcoin.conf file with RPC username/password (default: %default)")
    parser.add_option("--testnet", dest="testnet", default=False, action="store_true",
                      help="Use the test network")
    parser.add_option("--dry_run", dest="dry_run", default=False, action="store_true",
                      help="Don't broadcast the transaction, just create and print the transaction data")
    (options, args) = parser.parse_args()
    check_json_precision()
    config = read_bitcoin_config(options.datadir)
    if options.testnet: config['testnet'] = True
    bitcoind = connect_JSON(config)
    if options.amount is None:
        # List mode: show total per address (iteritems is Python 2 only).
        address_summary = list_available(bitcoind)
        for address,info in address_summary.iteritems():
            n_transactions = len(info['outputs'])
            if n_transactions > 1:
                print("%s %.8f %s (%d transactions)"%(address, info['total'], info['account'], n_transactions))
            else:
                print("%s %.8f %s"%(address, info['total'], info['account']))
    else:
        # Send mode: sanity-check the fee at 1% of the amount, then send.
        fee = Decimal(options.fee)
        amount = Decimal(options.amount)
        while unlock_wallet(bitcoind) == False:
            pass # Keep asking for passphrase until they get it right
        txdata = create_tx(bitcoind, options.fromaddresses.split(","), options.to, amount, fee)
        sanity_test_fee(bitcoind, txdata, amount*Decimal("0.01"))
        if options.dry_run:
            print(txdata)
        else:
            txid = bitcoind.sendrawtransaction(txdata)
            print(txid)
|
xtompok/uvod-do-prg
|
cv12/cv12.py
|
from operator import itemgetter
# Teaching script: comprehensions, map, filter and operator.itemgetter.
# Demo data: point dicts with a "coords" [x, y] pair.
points = [{"coords":[14,48]},{"coords":[15,50]},
          ]
# List comprehension extracting every coords pair...
s = [p["coords"] for p in points]
# ...and again with a filter condition (intentionally overwrites s).
s = [p["coords"] for p in points if p["coords"][0]>14]
print(s)
def twice(n):
    return 2*n
numbers = ["12","14","23"]
# map(): first convert the strings to ints, then double each value.
nums = list(map(int,numbers))
doubled = list(map(twice,nums))
print(nums)
print(doubled)
# Same extraction as above, once via itemgetter and once via a lambda.
s2 = list(map(itemgetter('coords'),points))
s3 = list(map(lambda p:p['coords'],points))
print(s2)
print(s3)
def bignum(n):
    if n > 13:
        return True
    else:
        return False
# filter(): keep only the numbers greater than 13 (predicate vs. lambda).
bignums = list(filter(bignum,nums))
bignums2 = list(filter(lambda n:n>13,nums))
print(bignums)
print(bignums2)
# TODO enumerate
|
paolo-losi/msgbox
|
msgbox/http.py
|
import time
import urllib
import urllib2
from Queue import Queue, Empty
from functools import partial
from threading import Thread, Lock
import tornado.httpserver
import tornado.ioloop
from tornado.web import HTTPError, RequestHandler, Application, asynchronous
from msgbox import logger
from msgbox.sim import sim_manager, TxSmsReq
# Shared singleton IOLoop; worker threads hand replies back to it via
# add_callback so handlers always finish on the IO thread.
ioloop = tornado.ioloop.IOLoop.instance()
# Expected POST body for /send_sms (application/x-www-form-urlencoded):
#   key:       "asds7878"        (optional)
#   recipient: "+393482222222"
#   sender:    "+393481111111"   (exactly one of sender/imsi)
#   imsi:      "21312123232"
#   text:      "sms text"
class MTHandler(RequestHandler):
    """Accepts a mobile-terminated SMS send request and replies asynchronously.

    Exactly one of "sender" or "imsi" must be supplied to select the SIM;
    the response is written once the SIM manager invokes the callback.
    """
    @asynchronous
    def post(self):
        sender = self.get_argument('sender', None)
        recipient = self.get_argument('recipient')
        text = self.get_argument('text')
        imsi = self.get_argument('imsi', None)
        key = self.get_argument('key', None)
        # XOR: reject requests providing both or neither of sender/imsi.
        if not ((sender is None) ^ (imsi is None)):
            err_msg = 'Use either "sender" or "imsi" params'
            raise HTTPError(400, err_msg)
        sim_manager.send(TxSmsReq(sender, recipient, text, imsi, key,
                                  callback=self.reply_callback))
    def reply_callback(self, response_dict):
        # Invoked from the SIM worker thread; hop back onto the IO loop.
        ioloop.add_callback(partial(self.handle_reply, response_dict))
    def handle_reply(self, response_dict):
        # Runs on the IO loop: log the outcome and complete the request.
        log_method = logger.warn if response_dict['status'] == 'ERROR' else \
                     logger.info
        log_method(response_dict['desc'])
        self.write(response_dict)
        self.finish()
class HTTPServerManager(object):
    """Owns the Tornado HTTP server that exposes the /send_sms endpoint."""
    def __init__(self, port=8080):
        app = Application([
            (r"/send_sms", MTHandler),
        ])
        self.port = port
        self.http_server = tornado.httpserver.HTTPServer(app)
    def start(self):
        """Begin listening on the configured port."""
        logger.info('http listening on port %s', self.port)
        self.http_server.listen(self.port)
    def stop(self):
        """Stop accepting new connections."""
        self.http_server.stop()
# Module-level singleton used by the application entry point.
http_server_manager = HTTPServerManager()
# ~~~~~~~~ HTTP Client Manager ~~~~~~~~
class HTTPClientManagerStoppingError(Exception): pass


class HTTPClientManager(object):
    """Pool of worker threads that POST received SMS to their target URL.

    Messages are dicts containing a 'url' key plus the form fields to
    forward. Each message is retried up to 3 times, pausing 10 s between
    attempts. After stop(), workers drain the remaining queue and exit.
    """
    N_WORKERS = 10

    def __init__(self):
        self.active = False
        self.workers = []
        self.queue = Queue()
        self.mutex = Lock()

    def start(self):
        """Spawn the worker threads."""
        self.active = True
        for i in xrange(self.N_WORKERS):
            thread = Thread(name='HttpClient %d' % i, target=self._work)
            self.workers.append(thread)
            thread.start()

    def stop(self):
        """Ask the workers to finish; they drain the queue before exiting."""
        with self.mutex:
            self.active = False

    def _work(self):
        # BUG FIX: `self.queue.empty` (missing call parentheses) is a bound
        # method and always truthy, so `not self.queue.empty` was always
        # False and pending messages were dropped as soon as stop() ran.
        while self.active or not self.queue.empty():
            try:
                msg_dict = self.queue.get(timeout=2)
            except Empty:
                continue
            url = msg_dict.pop('url')
            for attempt in xrange(3):
                try:
                    msg_dict['text'] = msg_dict['text'].encode('utf8')
                    data = urllib.urlencode(msg_dict)
                    urllib2.urlopen(url, data, timeout=20)
                    logger.info('forwarded sms - sender=%s recipient=%s',
                                msg_dict['sender'], msg_dict['recipient'])
                    break
                except Exception as e:
                    logger.error('error while sending msg', exc_info=True)
                    time.sleep(10)
            else:
                # All three attempts failed.
                logger.error('giving up sending message %s', msg_dict,
                             exc_info=1)

    def enqueue(self, rx_sms):
        """Queue a message for delivery; raises when the manager is stopping."""
        with self.mutex:
            if self.active:
                self.queue.put(rx_sms)
            else:
                raise HTTPClientManagerStoppingError()


http_client_manager = HTTPClientManager()
|
hakancelik96/coogger
|
core/cooggerapp/models/common.py
|
from django.contrib.contenttypes.models import ContentType
from django.db import models
from django_page_views.templatetags.django_page_views import views_count
from django_vote_system.templatetags.vote import downvote_count, upvote_count
class Common(models.Model):
    """Abstract model base exposing ContentType helpers for the subclass."""
    class Meta:
        abstract = True
    @property
    def content_type_obj(self):
        # ContentType row describing this concrete model class.
        return ContentType.objects.get_for_model(self)
    @property
    def app_label(self):
        """Django app label of the concrete model."""
        return self.content_type_obj.app_label
    @property
    def model_name(self):
        """Lower-case model name of the concrete model."""
        return self.content_type_obj.model
class Vote(models.Model):
    """Abstract model base exposing vote counts via django_vote_system."""
    class Meta:
        abstract = True
    @property
    def upvote_count(self):
        return upvote_count(self.__class__, self.id)
    @property
    def downvote_count(self):
        return downvote_count(self.__class__, self.id)
class View:
    """Mixin exposing a page-view counter via django_page_views.

    NOTE(review): unlike Common/Vote this is a plain class, not a
    models.Model subclass, so the inner Meta has no effect here; ``id``
    is a placeholder shadowed by the model this mixin is combined with.
    """
    id = None
    class Meta:
        abstract = True
    @property
    def views(self):
        return views_count(self.__class__, self.id)
|
namaggarwal/splitwise
|
tests/test_getCurrentUser.py
|
from splitwise import Splitwise
import unittest
try:
from unittest.mock import patch
except ImportError: # Python 2
from mock import patch
@patch('splitwise.Splitwise._Splitwise__makeRequest')
class GetCurrentUserTestCase(unittest.TestCase):
    """Tests for Splitwise.getCurrentUser() with the HTTP layer patched out.

    The class-level patch replaces the private __makeRequest method, so
    every test method receives the mock as an extra argument.
    """
    def setUp(self):
        self.sObj = Splitwise('consumerkey', 'consumersecret')
    def test_getCurrentUser_success(self, mockMakeRequest):
        # Canned JSON response mimicking the get_current_user API payload.
        mockMakeRequest.return_value = '{"user":{"id":12345,"first_name":"Naman","last_name":"Aggarwal","picture":{"small":"https://splitwise.s3.amazonaws.com/uploads/user/avatar/12345/small_mypic.jpg","medium":"https://splitwise.s3.amazonaws.com/uploads/user/avatar/12345/medium_mypic.jpg","large":"https://splitwise.s3.amazonaws.com/uploads/user/avatar/12345/large_mypic.jpg"},"custom_picture":true,"email":"naman@naman.com","registration_status":"confirmed","force_refresh_at":"2017-03-18T11:41:36Z","locale":"en","country_code":"IN","date_format":"MM/DD/YYYY","default_currency":"SGD","default_group_id":null,"notifications_read":"2020-06-10T14:12:01Z","notifications_count":8,"notifications":{"added_as_friend":true,"added_to_group":true,"expense_added":false,"expense_updated":false,"bills":true,"payments":true,"monthly_summary":true,"announcements":true}}}' # noqa: E501
        user = self.sObj.getCurrentUser()
        # Verify the right endpoint was called and the payload was parsed.
        mockMakeRequest.assert_called_with(
            "https://secure.splitwise.com/api/v3.0/get_current_user")
        self.assertEqual(user.getId(), 12345)
        self.assertEqual(user.getFirstName(), "Naman")
        self.assertEqual(user.getLastName(), "Aggarwal")
        self.assertEqual(user.getEmail(), "naman@naman.com")
        self.assertEqual(user.getPicture().getSmall(),
                         "https://splitwise.s3.amazonaws.com/uploads/user/avatar/12345/small_mypic.jpg")
        self.assertEqual(user.getPicture().getMedium(),
                         "https://splitwise.s3.amazonaws.com/uploads/user/avatar/12345/medium_mypic.jpg")
        self.assertEqual(user.getPicture().getLarge(),
                         "https://splitwise.s3.amazonaws.com/uploads/user/avatar/12345/large_mypic.jpg")
        self.assertEqual(user.getRegistrationStatus(), "confirmed")
    def test_getCurrentUser_exception(self, mockMakeRequest):
        # Request-layer failures must propagate out of getCurrentUser().
        mockMakeRequest.side_effect = Exception(
            "Invalid response %s. Please check your consumer key and secret." % 404)
        with self.assertRaises(Exception):
            self.sObj.getCurrentUser()
        mockMakeRequest.assert_called_with(
            "https://secure.splitwise.com/api/v3.0/get_current_user")
|
neiltest/neil_learn_python
|
src/learn_python/python_other/neil_06_txt_split.py
|
#coding: utf-8
"""
@Author: Well
@Date: 2014 - 04 - 14
"""
"""
Count the number of complete sentences in an English text file.
The text contains only letters, spaces, "," and ".".
A complete sentence ends with "." and must have at least one letter
before the ".".
"""
import os
# file name: this script's own basename without the extension
name_ = os.path.basename(__file__).split('.')[0]
dir_ = os.path.dirname(__file__)
# absolute directory path
# file2 = os.path.dirname(__file__)
# print file2
#
# # absolute path
#file3 = os.path.abspath(__file__)
# print file3
# absolute path of the companion .txt file (Windows-style separator)
file_ = dir_ + '\\' + name_ + '.txt'
# NOTE(review): Python 2 only -- built-in `file` and print statement below.
txt_ = file(file_, 'r')
# NOTE(review): this counts every '.'; it does not verify that a letter
# precedes the '.' as the problem statement requires.
txt_2 = txt_.read().split('.')
print len(txt_2) - 1
txt_.close()
|
tvd-dataset/tvd
|
tvd/rip/avconv.py
|
#!/usr/bin/env python
# encoding: utf-8
#
# The MIT License (MIT)
#
# Copyright (c) 2013-2015 CNRS
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# AUTHORS
# Hervé BREDIN -- http://herve.niderb.fr/
from __future__ import unicode_literals
from .command import CommandWrapper
class AVConv(CommandWrapper):
    """Convert audio or video streams with the `avconv` tool.

    Parameters
    ----------
    avconv : str, optional.
        Absolute path to `avconv` in case it is not reachable from PATH.
    """

    def __init__(self, avconv=None):
        super(AVConv, self).__init__('avconv' if avconv is None else avconv)

    def audio_track(self, handbrake_to, stream, to):
        """Extract one audio track from HandBrake output as mono 16-bit PCM.

        Parameters
        ----------
        handbrake_to : str
            Path to input file
        stream : int
            Index of the audio stream to extract.
        to : str
            Path to output file

        See savvyadmin.com/extract-audio-from-video-files-to-wav-using-ffmpeg
        """
        command = ['-y', '-i', handbrake_to]
        # -map 0:N selects stream N of the first input; details at
        # http://libav.org/avconv.html#Advanced-options
        command += ['-map', '0:%d' % stream]
        command += ['-acodec', 'pcm_s16le', '-ac', '1', to]
        self.run_command(options=command, env=None)

    def _video_convert(self, handbrake_to, audio_stream, to,
                       video_options, audio_codec):
        """Assemble the shared avconv option list and run the conversion.

        All three web formats use the same input mapping, audio bitrate,
        sample rate and channel layout; only the video options and the
        audio codec differ.
        """
        command = ['-y', '-i', handbrake_to, '-map', '0:0,0:0']
        command += video_options
        command += ['-map', '0:%d,0:0' % audio_stream]
        command += ['-b:a', '56k', '-ar', '22050', '-ac', '2']
        command += ['-acodec', audio_codec, to]
        self.run_command(options=command, env=None)

    def mp4(self, handbrake_to, audio_stream, to):
        """Transcode HandBrake output to an H.264 (libx264) MP4 file."""
        self._video_convert(
            handbrake_to, audio_stream, to,
            ['-i_qfactor', '0.71', '-qcomp', '0.6', '-qmin', '10',
             '-qmax', '63', '-qdiff', '4', '-trellis', '0',
             '-vcodec', 'libx264', '-b:v', '200k'],
            'libvo_aacenc')

    def webm(self, handbrake_to, audio_stream, to):
        """Transcode HandBrake output to a VP8/Vorbis WebM file."""
        self._video_convert(
            handbrake_to, audio_stream, to,
            ['-qmax', '63', '-b:v', '200k', '-vcodec', 'libvpx'],
            'libvorbis')

    def ogv(self, handbrake_to, audio_stream, to):
        """Transcode HandBrake output to a Theora/Vorbis OGV file."""
        self._video_convert(
            handbrake_to, audio_stream, to,
            ['-qmax', '63', '-b:v', '200k', '-vcodec', 'libtheora'],
            'libvorbis')
|
lukeyeager/compare-versions
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup, find_packages
# Get version
# version.py is exec'd rather than imported so that __version__ becomes
# available below without installing (or importing) the package first.
with open('compare_versions/version.py') as f:
    exec(f.read())
# Get documentation
def readme():
    """Return the contents of README.rst for use as the long description."""
    with open('README.rst') as f:
        return f.read()
# Packaging metadata; `bin/compare_versions` is installed as a CLI script
# and the bundled tests are discovered via the `tests` test_suite.
setup(
    name='compare_versions',
    version=__version__,
    author='Luke Yeager',
    author_email='luke.yeager@gmail.com',
    url='https://github.com/lukeyeager/compare-versions',
    description='Compare versions using various versioning schemes',
    long_description=readme(),
    scripts=[
        'bin/compare_versions',
    ],
    packages=find_packages(exclude=['tests', 'tests.*']),
    test_suite='tests',
    classifiers=[
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
    ],
)
|
danhuss/faker
|
faker/providers/ssn/lb_LU/__init__.py
|
from .. import Provider as BaseProvider
class Provider(BaseProvider):
    """A Faker provider for the Luxembourgish VAT IDs."""

    # Country prefix 'LU' followed by eight digits ('#' placeholders).
    vat_id_formats = (
        'LU########',
    )

    def vat_id(self):
        """Produce a random Luxembourgish VAT ID.

        http://ec.europa.eu/taxation_customs/vies/faq.html#item_11

        :return: a random Luxembourgish VAT ID
        """
        pattern = self.random_element(self.vat_id_formats)
        return self.bothify(pattern)
|
brunosmmm/hdltools
|
hdltools/vcd/__init__.py
|
"""Value change dump stuff."""
class VCDObject:
    """Abstract VCD object class."""


class VCDScope(VCDObject):
    """VCD scope: an ordered sequence of scope names, e.g. ``top::sub``."""

    def __init__(self, *scopes: str):
        """Initialize.

        Parameters
        ----------
        scopes
            List of scope names; empty names are silently dropped.

        Raises
        ------
        TypeError
            If any scope name is not a string.
        """
        self._scopes = []
        for scope in scopes:
            if not isinstance(scope, str):
                raise TypeError("scope name must be string")
            if len(scope) < 1:
                # empty, ignore
                continue
            self._scopes.append(scope)

    def __repr__(self):
        """Get representation ('::'-joined scope names)."""
        return "::".join(self._scopes)

    def __len__(self):
        """Get scope length (number of scope names)."""
        return len(self._scopes)

    def __getitem__(self, idx):
        """Get scope name by index."""
        return self._scopes[idx]

    def __eq__(self, other):
        """Scope equality.

        Returns NotImplemented for non-VCDScope operands (instead of the
        previous TypeError, which the old FIXME flagged): Python then falls
        back to the reflected comparison so ``scope == other_type`` simply
        evaluates False and scopes work in mixed containers.
        """
        if not isinstance(other, VCDScope):
            return NotImplemented
        return self._scopes == other._scopes

    def __hash__(self):
        """Get hash (consistent with __eq__)."""
        return hash(tuple(self._scopes))

    def contains(self, other: "VCDScope") -> bool:
        """Get whether this scope contains other scope.

        True when self is a strict prefix of *other* (self must be shorter).

        Arguments
        ----------
        other
            other scope to compare against
        """
        if not isinstance(other, VCDScope):
            raise TypeError("other must be a VCDScope object")
        if len(self) >= len(other):
            # cannot contain, length must be less
            return False
        for idx, this_subscope in enumerate(self._scopes):
            if other[idx] != this_subscope:
                return False
        return True

    @staticmethod
    def from_str(scope_str: str) -> "tuple":
        """Build from a '::'-separated string.

        Returns
        -------
        tuple
            ``(scope, inclusive)`` where *inclusive* is True when the
            string ends with '::' (trailing empty component). The previous
            ``-> "VCDScope"`` annotation was wrong: a tuple is returned.
        """
        if not isinstance(scope_str, str):
            raise TypeError("must be a string")
        scopes = scope_str.split("::")
        inclusive = len(scopes[-1]) < 1
        return (VCDScope(*scopes), inclusive)

    def pack(self) -> str:
        """Pack into the '::'-joined string form."""
        return str(self)
|
arielmakestuff/loadlimit
|
test/unit/stat/test_flushtosql.py
|
# -*- coding: utf-8 -*-
# test/unit/stat/test_flushtosql.py
# Copyright (C) 2016 authors and contributors (see AUTHORS file)
#
# This module is released under the MIT License.
"""Test flushtosql()"""
# ============================================================================
# Imports
# ============================================================================
# Stdlib imports
import asyncio
from functools import partial
# Third-party imports
from pandas import DataFrame, read_sql_table, Series, Timestamp, to_timedelta
import pytest
from sqlalchemy import create_engine
# Local imports
import loadlimit.channel as channel
from loadlimit.core import BaseLoop
from loadlimit.result import SQLTotal
import loadlimit.stat as stat
from loadlimit.stat import CountStore, SendTimeData
from loadlimit.util import aiter
# ============================================================================
# Fixtures
# ============================================================================
@pytest.fixture
def fake_flushtosql(monkeypatch):
    """Setup fake flushtosql callable"""
    # Swap the module-level flushtosql singleton for a fresh FlushToSQL so
    # no flush state leaks between tests; monkeypatch restores it afterwards.
    fake_flushtosql = stat.FlushToSQL()
    monkeypatch.setattr(stat, 'flushtosql', fake_flushtosql)
# Apply the channel-isolation fixtures to every test in this module.
pytestmark = pytest.mark.usefixtures('fake_shutdown_channel',
                                     'fake_timedata_channel')
# ============================================================================
# Tests
# ============================================================================
@pytest.mark.parametrize('num', [10, 12])
def test_flushtosql(testloop, num):
    """updateperiod updates statsdict with timeseries data points

    num fixture allows testing the flushtosql_shutdown coro func for:

    * all data has already been flushed to the sql db
    * there's still some data remaining that needs to be flushed to sql db
    """
    measure = CountStore()
    # Setup sqlalchemy engine (in-memory sqlite)
    engine = create_engine('sqlite://')
    timetotal = SQLTotal(sqlengine=engine, countstore=measure)
    # Create coro to time
    @measure(name='churn')
    async def churn(i):
        """Do nothing"""
        await asyncio.sleep(0)
    async def run():
        """Run churn() num times, then signal shutdown over the channel."""
        async for i in aiter(range(num)):
            await churn(i)
        await channel.shutdown.send(0)
    # Setup SendTimeData (flushwait=0 -> send measurements immediately)
    send = SendTimeData(measure, flushwait=to_timedelta(0, unit='s'),
                        channel=stat.timedata)
    # Add to shutdown channel
    channel.shutdown(send.shutdown)
    channel.shutdown(partial(stat.flushtosql_shutdown,
                             statsdict=timetotal.statsdict, sqlengine=engine))
    channel.shutdown(stat.timedata.shutdown)
    # Add flushtosql to timedata event
    stat.timedata(stat.flushtosql)
    # Run all the tasks
    with BaseLoop() as main:
        # Schedule SendTimeData coro
        asyncio.ensure_future(send())
        # Start every event, and ignore events that don't have any tasks
        stat.timedata.open()
        # flushlimit=5: with num=10 the data presumably drains completely
        # during the run, while num=12 leaves a remainder for
        # flushtosql_shutdown to flush (the two cases from the docstring).
        stat.timedata.start(asyncfunc=False, statsdict=timetotal.statsdict,
                            flushlimit=5, sqlengine=engine)
        asyncio.ensure_future(run())
        main.start()
    # Everything must have been flushed out of the statsdict into sqlite,
    # and reading the totals back must yield a non-empty DataFrame.
    assert timetotal.statsdict.numdata == 0
    df = timetotal()
    assert isinstance(df, DataFrame)
    assert not df.empty
# ============================================================================
# Test FlushToSQL.flushdata()
# ============================================================================
@pytest.mark.parametrize('exctype', [Exception, RuntimeError, ValueError])
def test_flushdata_nodata(sqlengine, exctype):
    """flushdata() creates no table when the statsdict holds only error data"""
    statsdict = stat.Period()
    key = 'hello'
    namekey = 'world'
    sqltbl = 'period'
    tblname = '{}_{}'.format(sqltbl, namekey)
    end = Timestamp.now(tz='UTC')
    delta = to_timedelta(5, unit='s').total_seconds()
    err = exctype(42)
    # Only an *error* datapoint is registered, so there is no regular
    # timeseries data for flushdata() to store.
    data = Series([end, 1/5, delta, repr(err), 1],
                  index=['end', 'rate', 'response', 'error', 'count'])
    statsdict.adderror(key, data)
    # Send data to sqlite db
    with sqlengine.begin() as conn:
        stat.flushtosql.flushdata(statsdict, key, sqltbl, namekey, sqlengine,
                                  conn)
    # Check sqlite db: the regular-data table must NOT have been created
    with sqlengine.begin() as conn:
        assert not sqlengine.dialect.has_table(conn, tblname)
# ============================================================================
# Test FlushToSQL.flusherror()
# ============================================================================
@pytest.mark.parametrize('exctype', [Exception, RuntimeError, ValueError])
def test_flusherror(sqlengine, exctype):
    """flusherror() persists a statsdict error entry into the sqlite db."""
    sqltbl = 'period'
    key = 'hello'
    namekey = 'world'
    tblname = '%s_error_%s' % (sqltbl, namekey)
    statsdict = stat.Period()
    # Build a single error datapoint and register it under `key`
    finished = Timestamp.now(tz='UTC')
    elapsed = to_timedelta(5, unit='s').total_seconds()
    exc = exctype(42)
    datapoint = Series([finished, 1/5, elapsed, repr(exc), 1],
                       index=['end', 'rate', 'response', 'error', 'count'])
    statsdict.adderror(key, datapoint)
    # Flush the error datapoint into the sqlite db
    with sqlengine.begin() as conn:
        stat.flushtosql.flusherror(statsdict, key, sqltbl, namekey, sqlengine,
                                   conn)
    # The error table must now exist and hold exactly the flushed row
    with sqlengine.begin() as conn:
        assert sqlengine.dialect.has_table(conn, tblname)
        stored = read_sql_table(tblname, conn, index_col='index',
                                parse_dates={'end': dict(utc=True)})
        assert len(stored.index) == 1
        assert stored.iloc[0].error == repr(exc)
# ============================================================================
# Test FlushToSQL.flushfailure()
# ============================================================================
def test_flushfailure(sqlengine):
    """flushfailure() persists a statsdict failure entry into the sqlite db."""
    sqltbl = 'period'
    key = 'hello'
    namekey = 'world'
    tblname = '%s_failure_%s' % (sqltbl, namekey)
    statsdict = stat.Period()
    # Build a single failure datapoint and register it under `key`
    finished = Timestamp.now(tz='UTC')
    elapsed = to_timedelta(5, unit='s').total_seconds()
    failure = stat.Failure(42)
    datapoint = Series([finished, 1/5, elapsed, str(failure.args[0]), 1],
                       index=['end', 'rate', 'response', 'failure', 'count'])
    statsdict.addfailure(key, datapoint)
    # Flush the failure datapoint into the sqlite db
    with sqlengine.begin() as conn:
        stat.flushtosql.flushfailure(statsdict, key, sqltbl, namekey,
                                     sqlengine, conn)
    # The failure table must now exist and hold exactly the flushed row
    with sqlengine.begin() as conn:
        assert sqlengine.dialect.has_table(conn, tblname)
        stored = read_sql_table(tblname, conn, index_col='index',
                                parse_dates={'end': dict(utc=True)})
        assert len(stored.index) == 1
        assert stored.iloc[0].failure == str(failure.args[0])
# ============================================================================
#
# ============================================================================
|
FeatherCoin/Feathercoin
|
test/functional/p2p_feefilter.py
|
#!/usr/bin/env python3
# Copyright (c) 2016-2019 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test processing of feefilter messages."""
from decimal import Decimal
import time
from test_framework.messages import msg_feefilter
from test_framework.mininode import mininode_lock, P2PInterface
from test_framework.test_framework import BitcoinTestFramework
def hashToHex(hash):
    """Return *hash* as a zero-padded 64-character lowercase hex string."""
    return '%064x' % hash
# Wait up to 60 secs to see if the testnode has received all the expected invs
def allInvsMatch(invsExpected, testnode):
    """Poll once per second (60s max) until testnode.txinvs matches."""
    expected = sorted(invsExpected)
    for _ in range(60):
        with mininode_lock:
            if expected == sorted(testnode.txinvs):
                return True
        time.sleep(1)
    return False
class TestP2PConn(P2PInterface):
    """P2P connection that records the tx inventory hashes it receives."""

    def __init__(self):
        super().__init__()
        self.txinvs = []

    def on_inv(self, message):
        # Record only transaction (type 1) inventory entries, in arrival order.
        self.txinvs.extend(hashToHex(item.hash)
                           for item in message.inv if item.type == 1)

    def clear_invs(self):
        """Reset the recorded invs under the mininode lock."""
        with mininode_lock:
            self.txinvs = []
class FeeFilterTest(BitcoinTestFramework):
    """Functional test: a peer's feefilter message suppresses low-fee tx invs."""
    def set_test_params(self):
        # Two nodes with a low wallet minimum fee so the test can set the
        # per-tx fee rates used below.
        self.num_nodes = 2
        self.extra_args = [[
            "-mintxfee=0.00001"
        ] for i in range(self.num_nodes)]
    def skip_test_if_missing_module(self):
        self.skip_if_no_wallet()
    def run_test(self):
        node1 = self.nodes[1]
        node0 = self.nodes[0]
        # Get out of IBD
        node1.generate(1)
        self.sync_blocks()
        # Attach the inv-recording test connection to node0.
        self.nodes[0].add_p2p_connection(TestP2PConn())
        # Test that invs are received for all txs at feerate of 20 sat/byte
        node1.settxfee(Decimal("0.00020000"))
        txids = [node1.sendtoaddress(node1.getnewaddress(), 1) for x in range(3)]
        assert allInvsMatch(txids, self.nodes[0].p2p)
        self.nodes[0].p2p.clear_invs()
        # Set a filter of 15 sat/byte
        self.nodes[0].p2p.send_and_ping(msg_feefilter(15000))
        # Test that txs are still being received (paying 20 sat/byte)
        txids = [node1.sendtoaddress(node1.getnewaddress(), 1) for x in range(3)]
        assert allInvsMatch(txids, self.nodes[0].p2p)
        self.nodes[0].p2p.clear_invs()
        # Change tx fee rate to 10 sat/byte and test they are no longer received
        node1.settxfee(Decimal("0.00010000"))
        [node1.sendtoaddress(node1.getnewaddress(), 1) for x in range(3)]
        self.sync_mempools()  # must be sure node 0 has received all txs
        # Send one transaction from node0 that should be received, so that we
        # we can sync the test on receipt (if node1's txs were relayed, they'd
        # be received by the time this node0 tx is received). This is
        # unfortunately reliant on the current relay behavior where we batch up
        # to 35 entries in an inv, which means that when this next transaction
        # is eligible for relay, the prior transactions from node1 are eligible
        # as well.
        node0.settxfee(Decimal("0.00020000"))
        txids = [node0.sendtoaddress(node0.getnewaddress(), 1)]
        assert allInvsMatch(txids, self.nodes[0].p2p)
        self.nodes[0].p2p.clear_invs()
        # Remove fee filter and check that txs are received again
        self.nodes[0].p2p.send_and_ping(msg_feefilter(0))
        txids = [node1.sendtoaddress(node1.getnewaddress(), 1) for x in range(3)]
        assert allInvsMatch(txids, self.nodes[0].p2p)
        self.nodes[0].p2p.clear_invs()
# Entry point: run the functional test directly.
if __name__ == '__main__':
    FeeFilterTest().main()
|
adriansoghoian/security-at-home
|
models.py
|
import datetime
import os
from time import strftime
from reportlab.lib.colors import black
from reportlab.lib.pagesizes import letter
from reportlab.lib.enums import TA_JUSTIFY
from reportlab.pdfbase import pdfmetrics
from reportlab.platypus import Paragraph, Spacer, Table, PageTemplate, \
BaseDocTemplate, Frame, Image
from reportlab.lib.styles import getSampleStyleSheet, ParagraphStyle
from reportlab.lib.units import inch
from reportlab.platypus.flowables import HRFlowable
import requests
# Timestamp captured once at import; shared by Report and the page footer.
report_time = datetime.datetime.now().ctime()
class Report:
    # Document heading. NOTE: __init__ below shadows this with the generated
    # PDF filename on each instance.
    title = "Canary Cyber Security Report"
    def __init__(self, hosts, router_status=True):
        """Prepare a PDF document template for a report about *hosts*.

        hosts: list of Host/Router objects to describe.
        router_status: True when the router admin page is secured.
        """
        global report_time
        self.hosts = hosts
        self.router_status = router_status
        self.title = str(report_time) + ".pdf"
        # Single-frame letter page; FirstPageSetup draws background + footer.
        self.template = BaseDocTemplate("reports/" + self.title,
                                        pageTemplates=[PageTemplate(id="first_page", frames=[
                                            Frame(inch, inch, 6.5 * inch, 9 * inch, topPadding=.6 * inch,
                                                  showBoundary=0)], onPage=FirstPageSetup, pagesize=letter)],
                                        rightMargin=72, leftMargin=72,
                                        topMargin=72, bottomMargin=18,
                                        font='Courier', fontSize=12)
        self.styles = getSampleStyleSheet()
    @classmethod
    def get_nvd_url(cls, host):
        """Build an NVD vulnerability-search URL for the host's manufacturer."""
        manufacturer_str = host.manufacturer
        # Only the first word of a multi-word manufacturer name is searched.
        if " " in manufacturer_str:
            manufacturer_str = manufacturer_str.split(" ")[0]
        url = "https://web.nvd.nist.gov/view/vuln/search-results?query=%s&search_type=all&cves=on" % (manufacturer_str)
        return url
    def generate(self):
        """Assemble the report flowables and build the PDF file."""
        Story = []
        self.styles.add(ParagraphStyle(name='Justify', alignment=TA_JUSTIFY))
        # title = "<font size=24><b>%s</b></font>" % (Report.title)
        # Story.append(Paragraph(title, self.styles['Normal']))
        Story.append(Spacer(1, 12))
        string = """<font size=12>Here is a status report about your home network.
        Please review it and follow up with appropriate actions.
        Remember that over two third of personal network intrusions occur
        due to weak passwords on your routers and devices.</font>"""
        Story.append(Paragraph(string, self.styles['Normal']))
        Story.append(Spacer(2, 12))
        # NOTE(review): sorted() returns a new list and its result is
        # discarded here, so this line is a no-op. Presumably meant
        # `self.hosts = sorted(self.hosts, key=type, reverse=True)` to put
        # the router first -- confirm intent before fixing (comparing type
        # objects also raises on Python 3).
        sorted(self.hosts, key=type, reverse=True)
        for i, host in enumerate(self.hosts):
            # The first host is treated as the router.
            if i == 0:
                Story.append(Image('res/wifi.png'))
                string = "<font size=16><b>Your router:</b></font>"
                Story.append(Paragraph(string, self.styles['Normal']))
                Story.append(Spacer(1, 16))
                if not self.router_status:
                    string = """<font color=red size=12><b>Admin Page Status: </b>Not secured<br/><br/>Please change \
                    the password for your router admin page <a href='%s' color=blue>here.</a></font>""" % str(
                        "http://" + host.ip)
                else:
                    string = "<font size=12><b>Admin Page Status: </b>" + "Secure</font>"
                Story.append(Paragraph(string, self.styles['Normal']))
                Story.append(Spacer(1, 16))
            # The second host begins the devices section.
            if i == 1:
                Story.append(Image('res/map.png'))
                string = "<font size=16><b>Your devices:</b></font>"
                Story.append(Paragraph(string, self.styles['Normal']))
                Story.append(Spacer(1, 16))
            string = "<font size=12><b>MAC Address:</b> " + host.mac_address + "</font>"
            Story.append(Paragraph(string, self.styles['Normal']))
            Story.append(Spacer(1, 12))
            string = "<font size=12><b>IP Address:</b> " + host.ip + "</font>"
            Story.append(Paragraph(string, self.styles['Normal']))
            Story.append(Spacer(1, 12))
            string = "<font size=12><b>Manufacturer:</b> " + host.manufacturer + "</font>"
            Story.append(Paragraph(string, self.styles['Normal']))
            Story.append(Spacer(1, 12))
            if host.manufacturer != "Unknown":
                string = """<font size=12><b>Major issues associated with manufacturer:</b>
                Please check out the <a href='%s' color='blue'>National Vulnerability
                Database</a> for a list of current issues related to %s products.</font>
                """ % (Report.get_nvd_url(host), host.manufacturer)
                Story.append(Paragraph(string, self.styles['Normal']))
                Story.append(Spacer(1, 12))
            string = "<font size=12><b>Number of open ports:</b> " + str(len(host.open_ports)) + "</font>"
            # NOTE(review): the ports line above is only appended inside this
            # branch, so hosts with zero open ports never show the count.
            if len(host.open_ports) > 0:
                if len(host.open_ports) > 2:
                    string += "<font color=red><br/>Please seek ways to close more ports.</font>"
                Story.append(Paragraph(string, self.styles['Normal']))
                Story.append(Spacer(1, 12))
                port_data = [["NUMBER", "SERVICE", "NOTES"]]
                # L tracks the widest link text so the inner table column fits.
                L = 1
                for port in host.open_ports:
                    # NOTE(review): the SERVICE column is filled with
                    # port_status; port.port_service is unused -- confirm.
                    row = [port.number, port.port_status]
                    port_site = "http://www.speedguide.net/port.php?port=%s" % port.number
                    google_port = "https://www.google.com/#q=port+%s" % port.number
                    # Fall back to a web search when speedguide has no page.
                    check_site = requests.get(port_site)
                    if check_site.status_code == 200:
                        link_txt = 'More info here'
                        link = """<a href="%s">%s</a>""" % (port_site,link_txt)
                    else:
                        link_txt = 'Unknown port, search here'
                        link = """<a href="%s">%s</a>""" % (google_port,link_txt)
                    if pdfmetrics.stringWidth(link_txt,'Courier',12) > L:
                        L = pdfmetrics.stringWidth(link_txt,'Courier',12) + 1
                    inside_Table = Table([[Paragraph(link,self.styles['Normal'])]], colWidths=L)
                    row.append(inside_Table)
                    port_data.append(row)
                port_table = Table(port_data)
                Story.append(port_table)
                Story.append(Spacer(3, 12))
            # Horizontal rule between hosts (skipped after the last one).
            if i < len(self.hosts) - 1:
                Story.append(HRFlowable(color=black))
                Story.append(Spacer(1, 12))
        self.template.build(Story)
def FirstPageSetup(canvas, doc):
    """reportlab onPage hook: draw the background image and page footer."""
    global report_time
    canvas.saveState()
    #Background image
    canvas.drawImage(os.path.curdir + "/res/report_base.png", 0, 0, width=8.5 * inch, height=11.0 * inch)
    #Footer
    canvas.drawString(0.5 * inch, 0.5 * inch, 'Canary Security Report for ' + str(report_time))
    canvas.drawRightString(8.0 * inch, 0.5 * inch, 'Page %d' % (doc.page))
    # Restore graphics state so page content is unaffected by footer styling.
    canvas.restoreState()
class Host:
count = 0
def __init__(self, os="Unknown", ip="Unknown", manufacturer="Unknown", mac_address="Unknown", open_ports=[],
is_down=False):
self.os = os
self.ip = ip
self.manufacturer = manufacturer
self.open_ports = open_ports
self.mac_address = mac_address
self.is_down = False
Host.count += 1
def add_port(self, port):
self.open_ports.append(port)
@classmethod
def flag_router(cls, hosts):
for each in hosts:
return True
@classmethod
def return_num_hosts(cls):
return str(cls.count)
def display_summary(self):
port_string = ""
if len(self.open_ports) > 0:
for each in self.open_ports:
port_string += str(each.number) + " "
else:
port_string = "None detected yet."
print "OS: ", self.os, ",manufacturer: ", self.manufacturer, ", MAC Address: ", self.mac_address, ", Ports: ", port_string
class Router(Host):
    """A Host that is the network's router, with an admin-page security flag."""

    def __init__(self, is_secured=True, os="Unknown", ip="Unknown", manufacturer="Unknown", mac_address="Unknown",
                 open_ports=None, is_down=False):
        # Normalize here so a fresh list reaches Host.__init__ each call,
        # avoiding the shared-mutable-default bug of `open_ports=[]`.
        Host.__init__(self, os, ip, manufacturer, mac_address,
                      open_ports if open_ports is not None else [], is_down)
        # True when the router admin page is password-protected.
        self.is_secured = is_secured
class Port:
    """A single scanned network port and its results."""

    def __init__(self, number, port_service, port_status):
        """Store the port number, detected service, and status string."""
        self.number, self.port_service, self.port_status = (
            number, port_service, port_status)
|
q1x/zabbix-gnomes
|
ztmplimport.py
|
#!/usr/bin/env python
#
# import needed modules.
# pyzabbix is needed, see https://github.com/lukecyca/pyzabbix
#
import argparse
import ConfigParser
import os
import os.path
import sys
import distutils.util
from xml.etree import ElementTree as ET
from pyzabbix import ZabbixAPI
# define config helper function
def ConfigSectionMap(section):
    """Return all options of *section* from the module-level Config as a dict.

    Option values are returned as strings; an option whose value cannot be
    read is stored as None (and a message is printed).
    """
    dict1 = {}
    options = Config.options(section)
    for option in options:
        try:
            dict1[option] = Config.get(section, option)
            if dict1[option] == -1:
                # NOTE(review): Config.get returns strings, so this branch
                # never fires -- and DebugPrint is not defined in this file.
                DebugPrint("skip: %s" % option)
        except Exception:
            # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
            # are no longer swallowed.
            print("exception on %s!" % option)
            dict1[option] = None
    return dict1
def PrintError(error):
    """Report *error*: warn and continue when -C was given, else abort."""
    if not args.continue_on_error:
        sys.exit(error)
    sys.stderr.write(error + '\n')
# set default vars
defconf = os.getenv("HOME") + "/.zbx.conf"
username = ""
password = ""
api = ""
noverify = ""
# Define commandline arguments
parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,description='Imports Zabbix templates from XML files. Default behaviour is to add missing elements and update any existing elements. Optionally elements that are missing from the XML file can be removed from existing template(s) as well.', epilog="""
This program can use .ini style configuration files to retrieve the needed API connection information.
To use this type of storage, create a conf file (the default is $HOME/.zbx.conf) that contains at least the [Zabbix API] section and any of the other parameters:
 [Zabbix API]
 username=johndoe
 password=verysecretpassword
 api=https://zabbix.mycompany.com/path/to/zabbix/frontend/
 no_verify=true
""")
parser.add_argument('-u', '--username', help='User for the Zabbix api')
parser.add_argument('-p', '--password', help='Password for the Zabbix api user')
parser.add_argument('-a', '--api', help='Zabbix API URL')
parser.add_argument('--no-verify', help='Disables certificate validation when using a secure connection',action='store_true')
parser.add_argument('-c','--config', help='Config file location (defaults to $HOME/.zbx.conf)')
parser.add_argument('-v', '--verbose', help='Enables verbose output.',action='store_true')
parser.add_argument('-T', '--templates', help='List of XML template files to import.',required=True, nargs="+")
parser.add_argument('-D', '--delete-missing', help='If a template already exists in Zabbix, any missing elements from the .XML will be removed from Zabbix as well.',action='store_true')
parser.add_argument('-U', '--update', help='If a template already exists in Zabbix, update any changes in template elements.',action='store_true')
parser.add_argument('-C', '--continue-on-error', help='Continue on error, use with caution.',action='store_true')
args = parser.parse_args()
# load config module
Config = ConfigParser.ConfigParser()
# NOTE(review): the bare `Config` expression below is a no-op statement.
Config
# if configuration argument is set, test the config file
if args.config:
    if os.path.isfile(args.config) and os.access(args.config, os.R_OK):
        Config.read(args.config)
# if not set, try default config file
else:
    if os.path.isfile(defconf) and os.access(defconf, os.R_OK):
        Config.read(defconf)
# try to load available settings from config file
try:
    username=ConfigSectionMap("Zabbix API")['username']
    password=ConfigSectionMap("Zabbix API")['password']
    api=ConfigSectionMap("Zabbix API")['api']
    noverify=bool(distutils.util.strtobool(ConfigSectionMap("Zabbix API")["no_verify"]))
# NOTE(review): bare except silently ignores any missing/invalid config value.
except:
    pass
# override settings if they are provided as arguments
if args.username:
    username = args.username
if args.password:
    password = args.password
if args.api:
    api = args.api
if args.no_verify:
    noverify = args.no_verify
# test for needed params
if not username:
    sys.exit("Error: API User not set")
if not password:
    sys.exit("Error: API Password not set")
if not api:
    sys.exit("Error: API URL is not set")
# Setup Zabbix API connection
zapi = ZabbixAPI(api)
if noverify is True:
    # Disable TLS certificate verification on the underlying session.
    zapi.session.verify = False
# Login to the Zabbix API
zapi.login(username, password)
##################################
# Start actual API logic
##################################
# We need the API version to know if valuemap importing is supported
zversion=zapi.apiinfo.version()
# set import modes
create=True
if args.update:
    update=True
else:
    update=False
if args.delete_missing:
    delete=True
else:
    delete=False
# set import rules, see https://www.zabbix.com/documentation/3.0/manual/api/reference/configuration/import
rules={}
rules['templates']={'createMissing': create, 'updateExisting':update}
rules['applications']={'createMissing': create, 'updateExisting': update, 'deleteMissing': delete}
rules['discoveryRules']={'createMissing': create, 'updateExisting': update, 'deleteMissing': delete}
rules['graphs']={'createMissing': create, 'updateExisting':update, 'deleteMissing': delete}
rules['groups']={'createMissing': create}
rules['items']={'createMissing': create, 'updateExisting':update, 'deleteMissing': delete}
rules['templateLinkage']={'createMissing': create}
rules['templateScreens']={'createMissing': create, 'updateExisting':update, 'deleteMissing': delete}
rules['triggers']={'createMissing': create, 'updateExisting':update, 'deleteMissing': delete}
# Valuemap imports are a Zabbix 3.x.x feature
if zversion.startswith('3.'):
    rules['valueMaps']={'createMissing':create, 'updateExisting':update}
# Parse file list
for template in args.templates:
    # `Continue` gates each stage; PrintError only clears it when -C is set.
    Continue=True
    if Continue:
        try:
            # open file for reading
            # NOTE(review): file() is the removed Python 2 builtin, and the
            # f.close() below is redundant inside a with-block.
            with file(template) as f:
                xml = f.read()
                f.close()
        except:
            # If the file can't be opened, exit with error
            error="Error: Something went wrong when trying to read the file" + template
            PrintError(error)
            Continue=False
    if Continue:
        try:
            # verify if the file is a valid XML
            tree = ET.fromstring(xml)
        except:
            # If the file can't isn't a valid XML, exit with error
            error="Error: XML is not valid in " + template
            PrintError(error)
            Continue=False
    if Continue:
        try:
            # Everything looks good, let's try to import
            # 'Import' with a capital I -- presumably pyzabbix's spelling to
            # avoid clashing with the Python `import` keyword; confirm.
            result=zapi.configuration.Import(format="xml",rules=rules, source=xml)
            if args.verbose:
                print("Succesfully imported " + template)
        except:
            # Something went wrong with the API call or import
            error="Error: Something went wrong while importing " + template + "\n" + str(sys.exc_info()[1][0])
            PrintError(error)
            Continue=False
# And we're done...
|
ilblackdragon/pymisc
|
pymisc/abstract.py
|
__author__ = 'ilblackdragon@gmail.com'
from pymisc import log, decorators
class RegisterSystem(object):
    """Global registry that sorts classes by the 'I' name-prefix convention."""

    # Registered interface classes (names starting with 'I').
    interfaces = []
    # All other registered classes.
    classes = []

    @classmethod
    @decorators.logprint(log)
    def register(self, cls):
        """Register *cls*, routing it to interfaces or classes by name prefix.

        Note: this is a classmethod, so `self` here is actually the
        RegisterSystem class; the parameter names are kept unchanged for
        compatibility with existing callers.
        """
        if cls.__name__[0] == 'I':
            # Fixed typo in both log messages ("Regirstring").
            print("Registering interface `%s`" % cls.__name__)
            RegisterSystem.interfaces.append(cls)
        else:
            print("Registering class `%s`" % cls.__name__)
            RegisterSystem.classes.append(cls)
class InterfaceMeta(type):
    """Metaclass that registers every class it creates and links interfaces
    ('I'-prefixed names) to the classes implementing them via `children`."""
    def __new__(cls, name, bases, dict):
        res = super(InterfaceMeta, cls).__new__(cls, name, bases, dict)
        # Every created class lands in the global RegisterSystem registry.
        RegisterSystem.register(res)
        if name[0] == 'I':
            # Interfaces get a fresh list to collect implementing classes.
            res.children = []
        else:
            # Implementations append themselves to each base's children list.
            # NOTE(review): a base without a `children` attribute would raise
            # AttributeError here -- confirm all bases come from this hierarchy.
            for b in bases:
                if b.children is not None:
                    b.children.append(res)
        return res
class IBase(object):
    """Root interface of the registration hierarchy.

    NOTE(review): `__metaclass__` is the Python 2 mechanism; under Python 3
    this assignment has no effect and InterfaceMeta is not applied.
    """
    __metaclass__ = InterfaceMeta
    @classmethod
    def send_signal(cls, method, *args, **kwargs):
        """Call *method* on every registered child class.

        Returns a list of (child, result) pairs, one per child.
        """
        res = []
        for child in cls.children:
            # Binds the raw function from the child's __dict__ to a dummy
            # receiver ('') before calling -- presumably the signalled
            # methods never touch `self`; verify before relying on this.
            res.append((child, child.__dict__[method].__get__('')(*args, **kwargs)))
        return res
|
mjlong/openmc
|
examples/python/lattice/nested/build-xml.py
|
import openmc
###############################################################################
#                      Simulation Input File Parameters
###############################################################################
# OpenMC simulation parameters
batches = 20
inactive = 10
particles = 10000
###############################################################################
#                 Exporting to OpenMC materials.xml File
###############################################################################
# Instantiate some Nuclides
h1 = openmc.Nuclide('H-1')
o16 = openmc.Nuclide('O-16')
u235 = openmc.Nuclide('U-235')
# Instantiate some Materials and register the appropriate Nuclides
fuel = openmc.Material(material_id=1, name='fuel')
fuel.set_density('g/cc', 4.5)
fuel.add_nuclide(u235, 1.)
moderator = openmc.Material(material_id=2, name='moderator')
moderator.set_density('g/cc', 1.0)
moderator.add_nuclide(h1, 2.)
moderator.add_nuclide(o16, 1.)
# Thermal scattering data for hydrogen bound in water.
moderator.add_s_alpha_beta('HH2O', '71t')
# Instantiate a MaterialsFile, register all Materials, and export to XML
materials_file = openmc.MaterialsFile()
materials_file.default_xs = '71c'
materials_file.add_materials([moderator, fuel])
materials_file.export_to_xml()
###############################################################################
#                 Exporting to OpenMC geometry.xml File
###############################################################################
# Instantiate Surfaces: a 4x4 bounding box plus three pin radii.
left = openmc.XPlane(surface_id=1, x0=-2, name='left')
right = openmc.XPlane(surface_id=2, x0=2, name='right')
bottom = openmc.YPlane(surface_id=3, y0=-2, name='bottom')
top = openmc.YPlane(surface_id=4, y0=2, name='top')
# One cylinder per pin universe below (radii 0.4, 0.3, 0.2).
fuel1 = openmc.ZCylinder(surface_id=5, x0=0, y0=0, R=0.4)
fuel2 = openmc.ZCylinder(surface_id=6, x0=0, y0=0, R=0.3)
fuel3 = openmc.ZCylinder(surface_id=7, x0=0, y0=0, R=0.2)
# Particles escape through the outer box (no reflection).
left.boundary_type = 'vacuum'
right.boundary_type = 'vacuum'
top.boundary_type = 'vacuum'
bottom.boundary_type = 'vacuum'
# Instantiate Cells
cell1 = openmc.Cell(cell_id=1, name='Cell 1')
cell2 = openmc.Cell(cell_id=2, name='Cell 2')
cell3 = openmc.Cell(cell_id=101, name='cell 3')
cell4 = openmc.Cell(cell_id=102, name='cell 4')
cell5 = openmc.Cell(cell_id=201, name='cell 5')
cell6 = openmc.Cell(cell_id=202, name='cell 6')
cell7 = openmc.Cell(cell_id=301, name='cell 7')
cell8 = openmc.Cell(cell_id=302, name='cell 8')
# Use surface half-space to define regions
cell1.region = +left & -right & +bottom & -top
cell2.region = +left & -right & +bottom & -top
# Each pin pairs an inner fuel region with the moderator outside it.
cell3.region = -fuel1
cell4.region = +fuel1
cell5.region = -fuel2
cell6.region = +fuel2
cell7.region = -fuel3
cell8.region = +fuel3
# Register Materials with Cells
cell3.fill = fuel
cell4.fill = moderator
cell5.fill = fuel
cell6.fill = moderator
cell7.fill = fuel
cell8.fill = moderator
# Instantiate Universe: univ1-3 are pin universes, univ4 wraps the assembly.
univ1 = openmc.Universe(universe_id=1)
univ2 = openmc.Universe(universe_id=2)
univ3 = openmc.Universe(universe_id=3)
univ4 = openmc.Universe(universe_id=5)
root = openmc.Universe(universe_id=0, name='root universe')
# Register Cells with Universe
univ1.add_cells([cell3, cell4])
univ2.add_cells([cell5, cell6])
univ3.add_cells([cell7, cell8])
root.add_cell(cell1)
univ4.add_cell(cell2)
# Instantiate nested Lattices
lattice1 = openmc.RectLattice(lattice_id=4, name='4x4 assembly')
lattice1.dimension = [2, 2]
lattice1.lower_left = [-1., -1.]
lattice1.pitch = [1., 1.]
lattice1.universes = [[univ1, univ2],
                      [univ2, univ3]]
lattice2 = openmc.RectLattice(lattice_id=6, name='4x4 core')
lattice2.dimension = [2, 2]
lattice2.lower_left = [-2., -2.]
lattice2.pitch = [2., 2.]
lattice2.universes = [[univ4, univ4],
                      [univ4, univ4]]
# Fill Cell with the Lattice
# cell1 (in root) holds the 2x2 core lattice; each core position is univ4,
# whose cell2 in turn holds the 2x2 pin-assembly lattice (nested lattices).
cell1.fill = lattice2
cell2.fill = lattice1
# Instantiate a Geometry and register the root Universe
geometry = openmc.Geometry()
geometry.root_universe = root
# Instantiate a GeometryFile, register Geometry, and export to XML
geometry_file = openmc.GeometryFile()
geometry_file.geometry = geometry
geometry_file.export_to_xml()
###############################################################################
#                   Exporting to OpenMC settings.xml File
###############################################################################
# Instantiate a SettingsFile, set all runtime parameters, and export to XML
settings_file = openmc.SettingsFile()
settings_file.batches = batches
settings_file.inactive = inactive
settings_file.particles = particles
settings_file.set_source_space('box', [-1, -1, -1, 1, 1, 1])
settings_file.export_to_xml()
###############################################################################
#                   Exporting to OpenMC plots.xml File
###############################################################################
# A 400x400-pixel material-colored slice covering the whole 4x4 box.
plot = openmc.Plot(plot_id=1)
plot.origin = [0, 0, 0]
plot.width = [4, 4]
plot.pixels = [400, 400]
plot.color = 'mat'
# Instantiate a PlotsFile, add Plot, and export to XML
plot_file = openmc.PlotsFile()
plot_file.add_plot(plot)
plot_file.export_to_xml()
###############################################################################
#                   Exporting to OpenMC tallies.xml File
###############################################################################
# Instantiate a tally mesh (4x4 over the full geometry)
mesh = openmc.Mesh(mesh_id=1)
mesh.type = 'regular'
mesh.dimension = [4, 4]
mesh.lower_left = [-2, -2]
mesh.width = [1, 1]
# Instantiate tally Filter
mesh_filter = openmc.Filter()
mesh_filter.mesh = mesh
# Instantiate the Tally scoring the total reaction rate on the mesh
tally = openmc.Tally(tally_id=1)
tally.add_filter(mesh_filter)
tally.add_score('total')
# Instantiate a TalliesFile, register Tally/Mesh, and export to XML
tallies_file = openmc.TalliesFile()
tallies_file.add_mesh(mesh)
tallies_file.add_tally(tally)
tallies_file.export_to_xml()
|
lunixbochs/sublimelint
|
sublimelint.py
|
# sublimelint.py
# SublimeLint is a code checking framework for Sublime Text
#
# Project: https://github.com/lunixbochs/sublimelint
# License: MIT
import sublime
import sublime_plugin
import os
from threading import Thread
import time
import json
from .lint.edit import apply_sublimelint_edit
from .lint.edit import Edit
from .lint.modules import Modules
from .lint.linter import Linter
from .lint.highlight import HighlightSet
from .lint.update import update
from .lint import persist
def plugin_loaded():
    """Bootstrap SublimeLint once the Sublime Text API becomes available."""
    linter_dir = os.path.join(sublime.packages_path(), 'User', 'linters')
    # Load every user-provided linter module before the first lint pass.
    persist.modules = Modules(linter_dir).load_all()
    persist.reinit()
    # Check for linter updates in the background so startup stays responsive.
    Thread(target=update, args=(linter_dir,)).start()
class SublimeLint(sublime_plugin.EventListener):
    """Main Sublime Text event listener.

    Routes editor events (modify, load, activate, save) into the lint
    queue and paints the results (highlights and status bar text) back
    onto the view.
    """

    def __init__(self, *args, **kwargs):
        sublime_plugin.EventListener.__init__(self, *args, **kwargs)
        self.loaded = set()    # view ids that have been through on_new()
        self.linted = set()    # view ids that have been linted at least once
        self.last_syntax = {}  # view id -> last observed syntax setting
        # The queue debounces edits and calls back into SublimeLint.lint.
        persist.queue.start(self.lint)
        # this gives us a chance to lint the active view on fresh install
        window = sublime.active_window()
        if window:
            # NOTE(review): on_activated is not defined on this class (only
            # on_activated_async is) -- verify where it is resolved from.
            self.on_activated(window.active_view())
        self.start = time.time()

    @classmethod
    def lint(cls, view_id, callback=None):
        """Collect the view's code and scope sections and hand them to the
        Linter machinery; callback defaults to cls.finish."""
        callback = callback or cls.finish
        view = Linter.get_view(view_id)
        # Map each registered selector to its (row, begin, end) regions.
        sections = {}
        # NOTE(review): `view` is dereferenced in this loop before the
        # None-check below; if get_view() can return None this raises -- verify.
        for sel, _ in Linter.get_selectors(view_id):
            sections[sel] = []
            for result in view.find_by_selector(sel):
                sections[sel].append(
                    (view.rowcol(result.a)[0], result.a, result.b)
                )
        if view is not None:
            filename = view.file_name()
            code = Linter.text(view)
            args = (view_id, filename, code, sections, callback)
            Linter.lint_view(*args)

    @classmethod
    def finish(cls, view, linters):
        """Merge the results from every linter for *view* and redraw."""
        errors = {}
        highlights = HighlightSet()
        for linter in linters:
            if linter.highlights:
                highlights.add(linter.highlights)
            if linter.errors:
                errors.update(linter.errors)
        highlights.clear(view)
        highlights.draw(view)
        # Stash errors so on_selection_modified_async can show them.
        persist.errors[view.id()] = errors

    # helpers

    def hit(self, view):
        """Queue a lint for *view*; clear existing results if it is empty."""
        self.linted.add(view.id())
        if view.size() == 0:
            for l in Linter.get_linters(view.id()):
                l.clear()
            return
        persist.queue.hit(view)

    def check_syntax(self, view, lint=False):
        """Re-assign linters when the view's syntax changed; optionally lint."""
        vid = view.id()
        syntax = view.settings().get('syntax')
        # syntax either has never been set or just changed
        if not vid in self.last_syntax or self.last_syntax[vid] != syntax:
            self.last_syntax[vid] = syntax
            # assign a linter, then maybe trigger a lint if we get one
            if Linter.assign(view) and lint:
                self.hit(view)

    # callins

    def on_modified(self, view):
        self.check_syntax(view)
        self.hit(view)

    def on_modified_async(self, view):
        # Keep the status bar in sync with the caret after async edits.
        self.on_selection_modified_async(view)

    def on_load(self, view):
        self.on_new(view)

    def on_activated_async(self, view):
        persist.reinit()
        if not view:
            return
        self.check_syntax(view, True)
        view_id = view.id()
        if not view_id in self.linted:
            if not view_id in self.loaded:
                # it seems on_activated can be called before loaded on first start
                if time.time() - self.start < 5: return
                self.on_new(view)
            self.hit(view)
        self.on_selection_modified_async(view)

    def on_open_settings(self, view):
        # handle opening user preferences file
        filename = view.file_name()
        if filename:
            dirname = os.path.basename(os.path.dirname(filename))
            filename = os.path.basename(filename)
            # Only act on the user's SublimeLint settings file, not the
            # package's own copy.
            if filename != 'SublimeLint.sublime-settings':
                return
            if dirname.lower() == 'sublimelint':
                return
            persist.reinit()
            settings = persist.settings
            # fill in default plugin settings
            plugins = settings.pop('plugins', {})
            for name, language in persist.languages.items():
                default = language.get_settings().copy()
                default.update(plugins.pop(name, {}))
                plugins[name] = default
            settings['plugins'] = plugins
            # Rewrite the settings buffer with defaults merged in, then save.
            with Edit(view) as edit:
                j = json.dumps({'user': settings}, indent=4, sort_keys=True)
                j = j.replace(' \n', '\n')
                edit.replace(sublime.Region(0, view.size()), j)
            view.run_command('save')

    def on_new(self, view):
        """Initialise bookkeeping and linter assignment for a fresh view."""
        self.on_open_settings(view)
        vid = view.id()
        self.loaded.add(vid)
        self.last_syntax[vid] = view.settings().get('syntax')
        Linter.assign(view)

    def on_post_save(self, view):
        # this will reload linters if they are saved with sublime text
        # (inode comparison handles symlinked module files on POSIX)
        for name, module in persist.modules.items():
            if os.name == 'posix' and (
                os.stat(module.__file__).st_ino == os.stat(view.file_name()).st_ino
            ) or module.__file__ == view.file_name():
                persist.debug("reloading module '{}'".format(name))
                persist.modules.reload(module)
                Linter.reload(name)
                break
        # linting here doesn't matter, because we lint on load and on modify
        # self.hit(view)

    def on_selection_modified_async(self, view):
        """Update the status bar with the error summary for the caret line."""
        vid = view.id()
        try:
            lineno = view.rowcol(view.sel()[0].end())[0]
        except IndexError:
            # No selection (e.g. view closing); fall back to "no line".
            lineno = -1
        status = ''
        if vid in persist.errors:
            errors = persist.errors[vid]
            if errors:
                plural = 's' if len(errors) > 1 else ''
                if lineno in errors:
                    if plural:
                        num = sorted(list(errors)).index(lineno) + 1
                        status += '%i/%i errors: ' % (num, len(errors))
                    # sublime statusbar can't hold unicode
                    status += '; '.join(set(errors[lineno]))
                else:
                    status = '%i error%s' % (len(errors), plural)
                view.set_status('sublimelint', status)
            else:
                view.erase_status('sublimelint')
        persist.queue.delay()
|
LionelDupuy/ARCHI_PHEN
|
ImageJ/DatabaseInput_deprecated.py
|
import time
from datetime import date
import numpy
from PIL import Image
import zbar
import os,sys
import wx # GUI
# Handle time lapse!
scanner = zbar.ImageScanner()
# configure the reader
scanner.parse_config('enable')
#scanner.set_config(0, zbar.Config.ENABLE, 0)
#scanner.set_config(zbar.Symbol.QRCODE, zbar.Config.ENABLE, 1)
label = ""
# TODO
# Read label better (crop enhance contrast etc...)
# copy files
# record previous file
def readQRCODE(ImageFile):
    """Decode the QR code embedded in *ImageFile*.

    The picture is cropped to the label region and converted to greyscale
    before scanning. Returns the decoded payload string, or "" when no
    symbol is found.
    """
    decoded = ""
    picture = Image.open(ImageFile)
    w, h = picture.size
    # Crop away the borders before handing the label region to the reader.
    picture = picture.crop((int(0.18 * w), int(0.2 * h), int(0.97 * w), int(0.95 * h)))
    picture = picture.convert('L')  # greyscale, as zbar expects
    w, h = picture.size
    raw = picture.tostring()
    # wrap image data ('Y800' = 8-bit greyscale) and scan it for barcodes
    zimg = zbar.Image(w, h, 'Y800', raw)
    scanner.scan(zimg)
    # Keep the last decoded symbol, matching the original behaviour.
    for symbol in zimg:
        decoded = symbol.data
    del zimg
    return decoded
class MainWindow(wx.Frame):
    """Top-level GUI window.

    Lets the user pick a root folder and scan it for plant-scan JPGs whose
    companion "<name>-QR-.jpg" image identifies the sample genotype.
    """

    def __init__(self, parent, title):
        wx.Frame.__init__(self, parent, title=title, size=(400,300))
        # Read-only multi-line text area used as the log/status display.
        self.control = wx.TextCtrl(self, style=wx.TE_MULTILINE | wx.TE_READONLY)
        self.CreateStatusBar() # A Statusbar in the bottom of the window
        # Folders default to the script's own directory until the user
        # chooses one via the File menu.
        self.dest_folder = os.path.dirname(sys.argv[0])
        self.root_folder = os.path.dirname(sys.argv[0])
        # Setting up the menu.
        filemenu= wx.Menu()
        # wx.ID_ABOUT and wx.ID_EXIT are standard IDs provided by wxWidgets.
        filemenu.Append(1, "&Base Folders"," Set folders")
        filemenu.Append(2, "&Run"," scan for files")
        filemenu.Append(wx.ID_ABOUT, "&About"," Information about this program")
        filemenu.AppendSeparator()
        filemenu.Append(wx.ID_EXIT,"E&xit"," Terminate the program")
        # Bind the two custom menu ids to their handlers.
        self.Bind(wx.EVT_MENU, self.get_folder, id=1)
        self.Bind(wx.EVT_MENU, self.scan_data, id=2)
        # Creating the menubar.
        menuBar = wx.MenuBar()
        menuBar.Append(filemenu,"&File") # Adding the "filemenu" to the MenuBar
        self.SetMenuBar(menuBar)  # Adding the MenuBar to the Frame content.
        self.Show(True)

    def get_folder(self, id):
        """Menu handler: ask the user for the root folder to scan."""
        dlg = wx.DirDialog(self, "Choose Root Folder:")
        if dlg.ShowModal() == wx.ID_OK:
            self.root_folder = dlg.GetPath()
        dlg.Destroy()

    def scan_data(self, id):
        """Menu handler: walk root_folder for .JPG scans and resolve each
        one's genotype from its companion "<name>-QR-.jpg" image, asking the
        user for manual entry when the QR code cannot be decoded."""
        #################################################################
        # Find all suitable files in the current folder
        #################################################################
        dir = self.root_folder#os.path.dirname(sys.argv[0])
        sys.path.append(dir)
        for f in os.listdir(dir):
            file, ext = os.path.splitext(f) # Handles no-extension files, etc.
            if ext == '.JPG':
                base_row = file.split("-")
                base = base_row[0]
                # Only process base scans (no '-'), not the "-QR-" companions.
                if len(base_row) == 1:
                    if os.path.isfile(dir+ "\\"+ file + "-QR-.jpg"):
                        genotype = readQRCODE(dir+ "\\"+ file + "-QR-.jpg")
                        # image properties
                        file_tmp1 = file.split('_')
                        file_id = file_tmp1[1]
                        #os.path.getmtime(dir+ "\\"+ file +ext)
                        # Image identifiers; presumably [species, population,
                        # id] -- TODO confirm against downstream use.
                        identifyer = [None,None,None]
                        if len(genotype) > 5:
                            # QR decode succeeded: report it and split it up.
                            text = "Root directory: " + dir + "\n"
                            text += "File: " + file + "\n"
                            text += "Genotype: " + genotype
                            self.control.SetValue(text)
                            wx.Yield()
                            identifyer = genotype.split('_')
                        else:
                            # QR decode failed: show the label image next to a
                            # text-entry dialog and ask the user to type it.
                            pilImage = Image.open(dir+ "\\"+ file + "-QR-.jpg")
                            width, height = pilImage.size
                            pilImage = pilImage.crop((int(0.18*width), int(0.3*height),int(0.97*width), int(0.92*height)))
                            width, height = pilImage.size
                            sc = 0.6
                            pilImage = pilImage.resize((int(width*sc),int(height*sc)), Image.ANTIALIAS)
                            img = wx.EmptyImage( *pilImage.size )
                            pilImageCopy = pilImage.copy()
                            pilImageCopyRGB = pilImageCopy.convert( 'RGB' ) # Discard any alpha from the PIL image.
                            pilImageRgbData =pilImageCopyRGB.tostring()
                            img.SetData( pilImageRgbData )
                            identifyer_length = 0
                            # NOTE(review): the guard is '> -1', which is always
                            # true once set (the commented '!=3' suggests the
                            # intended exit condition) -- verify; as written the
                            # loop only ends by killing the dialog flow.
                            while identifyer_length>-1:# !=3:
                                dlg = wx.TextEntryDialog(self, 'Type "Species Population Id" with space as separation', 'Could not read bar code', '')
                                dlg.SetValue("")
                                self.pnl = MyFrame(dlg, -1, "Label not read", size=(int(width*sc),int(height*sc)), pos = (800,100), style = wx.DEFAULT_FRAME_STYLE, pic = img)
                                self.pnl.Show(True)
                                if dlg.ShowModal() == wx.ID_OK:
                                    txtvalue = dlg.GetValue() #genotype.split('_')
                                    identifyer = txtvalue.split(' ')
                                    identifyer_length = len(identifyer)
                                dlg.Destroy()
                    else:
                        # No companion QR image exists for this scan at all.
                        text = "!!! Could not recover barcode for !!! :\n\n"
                        text += "Root directory: " + dir + "\n"
                        text += "File: " + file + "\n"
                        self.control.SetValue(text)
                        wx.Yield()
class MyFrame(wx.Frame):
    """Helper window that just displays a picture (the label image shown
    alongside the manual-entry dialog in MainWindow.scan_data)."""

    def __init__(
        self, parent, ID, title, pos=wx.DefaultPosition,
        size=wx.DefaultSize, style=wx.DEFAULT_FRAME_STYLE, pic = None
    ):
        wx.Frame.__init__(self, parent, ID, title, pos, size, style)
        panel = wx.Panel(self, -1)
        # pic is a wx.Image; render it as a static bitmap at the origin.
        wx.StaticBitmap(panel, -1, pic.ConvertToBitmap(), (0, 0))

    def OnCloseMe(self, event):
        self.Close(True)

    def OnCloseWindow(self, event):
        self.Destroy()
# Script entry point: build the main window and hand control to wx.
app = wx.App(False)
frame = MainWindow(None, "Sample editor")
app.MainLoop()
|
enkore/i3pystatus
|
i3pystatus/coin.py
|
import requests
import json
from decimal import Decimal
from i3pystatus import IntervalModule
from i3pystatus.core.util import internet, require
class Coin(IntervalModule):
    """
    Fetches live data of all cryptocurrencies available at coinmarketcap <https://coinmarketcap.com/>.
    Coin setting should be equal to the 'id' field of your coin in <https://api.coinmarketcap.com/v1/ticker/>.
    Example coin settings: bitcoin, bitcoin-cash, ethereum, litecoin, dash, lisk.
    Example currency settings: usd, eur, huf.
    .. rubric:: Available formatters
    * {symbol}
    * {price}
    * {rank}
    * {24h_volume}
    * {market_cap}
    * {available_supply}
    * {total_supply}
    * {max_supply}
    * {percent_change_1h}
    * {percent_change_24h}
    * {percent_change_7d}
    * {last_updated} - time of last update on the API's part
    * {status}
    """

    settings = (
        ("format", "format string used for output."),
        # Fix: this entry was the bare string ("color") -- parentheses without
        # a comma do not make a tuple -- so, unlike every sibling entry, it
        # carried no description.
        ("color", "color of the output"),
        ("coin", "cryptocurrency to fetch"),
        ("decimal", "round coin price down to this decimal place"),
        ("currency", "fiat currency to show fiscal data"),
        ("symbol", "coin symbol"),
        ("interval", "update interval in seconds"),
        ("status_interval", "percent change status in the last: '1h' / '24h' / '7d'")
    )

    # Defaults for the settings above.
    symbol = "¤"
    color = None
    format = "{symbol} {price}{status}"
    coin = "ethereum"
    currency = "USD"
    interval = 600
    status_interval = "24h"
    decimal = 2

    def fetch_data(self):
        """Query the coinmarketcap ticker API and return a dict of formatter
        values for the configured coin/currency."""
        response = requests.get("https://api.coinmarketcap.com/v1/ticker/{}/?convert={}".format(self.coin, self.currency))
        coin_data = response.json()[0]
        # The API suffixes fiat fields with the currency code; strip the
        # suffix so format strings can use stable keys.
        coin_data["price"] = coin_data.pop("price_{}".format(self.currency.lower()))
        coin_data["24h_volume"] = coin_data.pop("24h_volume_{}".format(self.currency.lower()))
        coin_data["market_cap"] = coin_data.pop("market_cap_{}".format(self.currency.lower()))
        coin_data["symbol"] = self.symbol
        return coin_data

    def set_status(self, change):
        """Map a percent change to a trend arrow.

        Double arrows past +/-10%, single arrows otherwise, '' for no change.
        """
        if change > 10:
            return '⮅'
        elif change > 0:
            return '⭡'
        elif change < -10:
            return '⮇'
        elif change < 0:
            return '⭣'
        else:
            return ''

    @require(internet)
    def run(self):
        """Fetch fresh ticker data and publish the formatted output."""
        fdict = self.fetch_data()
        # Well-known currency glyphs override the generic symbol.
        symbols = dict(bitcoin='฿', ethereum='Ξ', litecoin='Ł', dash='Đ')
        if self.coin in symbols:
            fdict["symbol"] = symbols[self.coin]
        fdict["status"] = self.set_status(float(fdict["percent_change_{}".format(self.status_interval)]))
        fdict["price"] = str(round(Decimal(fdict["price"]), self.decimal))
        self.data = fdict
        self.output = {"full_text": self.format.format(**fdict)}
        if self.color is not None:
            self.output['color'] = self.color
|
ptrsxu/snippetpy
|
shdisplay/progressbar.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""from python cookbook 2rd edition"""
import sys
class Progressbar(object):
    """Console progress bar (adapted from the Python Cookbook, 2nd ed.).

    Prints a fixed 50-character scale header, then emits one block
    character per 2% of completion as progress() is called.
    """

    def __init__(self, finalcount, block_char="."):
        self.finalcount = finalcount
        self.blockcount = 0       # blocks printed so far (each = 2%)
        self.block = block_char
        self.f = sys.stdout
        # A zero final count means "already done": print no header.
        if not self.finalcount:
            return
        self.f.write(
            "\n--------------------- % Progress ----------------1\n"
            " 1 2 3 4 5 6 7 8 9 0\n"
            "----0----0----0----0----0----0----0----0----0----0\n"
        )

    def progress(self, count):
        """Advance the bar to *count* (clamped to finalcount)."""
        count = min(count, self.finalcount)
        if self.finalcount:
            pct = max(1, int(round(100.0 * count / self.finalcount)))
        else:
            pct = 100
        target = int(pct // 2)
        # Never rewind; only emit the blocks not yet printed.
        if target <= self.blockcount:
            return
        self.f.write(self.block * (target - self.blockcount))
        self.f.flush()
        self.blockcount = target
        if pct == 100:
            self.f.write("\n")
def main():
    """Manual smoke test: exercise the bar with several final counts."""
    from time import sleep
    # NOTE: this demo (and hence the file) targets Python 2 -- it uses
    # print statements.
    print "test:"
    pb = Progressbar(8, "*")
    for count in range(1, 9):
        pb.progress(count)
        sleep(0.2)
    print "test 100:"
    pb = Progressbar(100)
    pb.progress(20)
    sleep(0.3)
    pb.progress(40)
    sleep(0.3)
    pb.progress(55)
    sleep(0.3)
    pb.progress(90)
    sleep(0.3)
    pb.progress(100)
    sleep(0.3)
    print "test 1:"
    pb = Progressbar(1)
    pb.progress(1)

if __name__=="__main__":
    main()
|
onitu/onitu
|
docs/examples/driver.py
|
import os
from onitu.api import Plug, ServiceError, DriverError
# A dummy library supposed to watch the file system
from fsmonitor import FSWatcher
plug = Plug()
@plug.handler()
def get_chunk(metadata, offset, size):
    """Return *size* bytes of metadata.filename starting at *offset*.

    Raises ServiceError when the file cannot be read.
    """
    try:
        with open(metadata.filename, 'rb') as source:
            source.seek(offset)
            return source.read(size)
    except IOError as error:
        raise ServiceError(
            "Error reading '{}': {}".format(metadata.filename, error)
        )
@plug.handler()
def upload_chunk(metadata, offset, chunk):
    """Write *chunk* into metadata.filename at *offset*.

    Raises:
        ServiceError: if the file cannot be opened or written.
    """
    try:
        # 'r+b' preserves existing content so chunks may arrive in any order.
        with open(metadata.filename, 'r+b') as f:
            f.seek(offset)
            f.write(chunk)
    except IOError as e:
        # Fixed typo in the error message: "writting" -> "writing".
        raise ServiceError(
            "Error writing '{}': {}".format(metadata.filename, e)
        )
@plug.handler()
def end_upload(metadata):
    # Record the file's mtime as its revision once the transfer completes,
    # so later change detection (Watcher.check_changes) can compare to it.
    metadata.revision = os.path.getmtime(metadata.filename)
    metadata.write_revision()
class Watcher(FSWatcher):
    """Watches the driver's root folder and notifies the plug of changes."""

    def on_update(self, filename):
        """Called each time an update of a file is detected
        """
        metadata = plug.get_metadata(filename)
        # The file's mtime serves as its revision marker.
        metadata.revision = os.path.getmtime(metadata.filename)
        metadata.size = os.path.getsize(metadata.filename)
        plug.update_file(metadata)

    def check_changes(self):
        """Check the changes on the file system since the last launch
        """
        for filename in self.files:
            revision = os.path.getmtime(filename)
            metadata = plug.get_metadata(filename)
            # If the file is more recent than the stored revision
            if revision > metadata.revision:
                metadata.revision = os.path.getmtime(metadata.filename)
                metadata.size = os.path.getsize(metadata.filename)
                plug.update_file(metadata)
def start():
    """Driver entry point: enter the configured root directory, replay any
    changes missed while offline, then watch the tree and serve requests."""
    try:
        root = plug.options['root']
        os.chdir(root)
    except OSError as err:
        raise DriverError("Can't access '{}': {}".format(root, err))
    fs_watcher = Watcher(root)
    fs_watcher.check_changes()
    fs_watcher.start()
    # Blocks, handling requests from the plug until shutdown.
    plug.listen()
|
OmegaDroid/django-model-monitor
|
src/test_app/test/unit/test_monitored_model_get_changes.py
|
from django.test import TestCase
from test_app.models import MonitorAllFields, MonitorSomeFields
class MonitoredModelGetChanges(TestCase):
    """Behavioural tests for ``get_changes()`` on monitored models."""

    def test_monitor_all_fields_no_changes___result_is_empty_dict(self):
        instance = MonitorAllFields(first_field=1)
        self.assertEqual({}, instance.get_changes())

    def test_monitor_all_fields_all_fields_changed___result_contains_the_old_and_new_values_for_all_fields(self):
        instance = MonitorAllFields(first_field=1, second_field=3)
        instance.first_field = 2
        instance.second_field = 4
        expected = {
            "first_field": {"old": 1, "new": 2},
            "second_field": {"old": 3, "new": 4},
        }
        self.assertEqual(expected, instance.get_changes())

    def test_monitor_all_fields_single_field_changed___result_contains_the_changed_fields_old_and_new_values(self):
        instance = MonitorAllFields(first_field=1, second_field=3)
        instance.first_field = 2
        expected = {"first_field": {"old": 1, "new": 2}}
        self.assertEqual(expected, instance.get_changes())

    def test_monitor_some_fields_change_monitored_fields___result_contains_the_changed_fields_old_and_new_values(self):
        instance = MonitorSomeFields(monitored_field=1)
        instance.monitored_field = 2
        expected = {"monitored_field": {"old": 1, "new": 2}}
        self.assertEqual(expected, instance.get_changes())

    def test_monitor_some_fields_change_unmonitored_fields___result_is_empty_dict(self):
        instance = MonitorSomeFields(unmonitored_field=1)
        instance.unmonitored_field = 2
        self.assertEqual({}, instance.get_changes())
|
levilucio/SyVOLT
|
UMLRT2Kiltera_MM/graph_MT_post__Model_T.py
|
"""
__graph_MT_post__Model_T.py___________________________________________________________
Automatically generated graphical appearance ---> MODIFY DIRECTLY WITH CAUTION
_____________________________________________________________________________
"""
import tkFont
from graphEntity import *
from GraphicalForm import *
from ATOM3Constraint import *
class graph_MT_post__Model_T(graphEntity):
    """Auto-generated graphical appearance for the MT_post__Model_T entity.

    Draws a labelled cyan rectangle with one (invisible) connector anchor.
    Generated by ATOM3 -- modify with caution.
    """

    def __init__(self, x, y, semObject = None):
        # The semantic (model) object this graphical entity represents.
        self.semanticObject = semObject
        # Fixed bounding-box size of the generated shape.
        self.sizeX, self.sizeY = 173, 91
        graphEntity.__init__(self, x, y)
        self.ChangesAtRunTime = 0
        self.constraintList = []
        if self.semanticObject: atribs = self.semanticObject.attributesToDraw()
        else: atribs = None
        self.graphForms = []
        self.imageDict = self.getImageDict()

    def DrawObject(self, drawing, showGG = 0):
        """Render the entity onto *drawing* (a Tk canvas).

        When showGG is true the graph-grammar label is drawn as well.
        """
        self.dc = drawing
        if showGG and self.semanticObject: self.drawGGLabel(drawing)
        # Zero-size invisible oval used only as the connector anchor point.
        h = drawing.create_oval(self.translate([209.0, 88.0, 209.0, 88.0]), tags = (self.tag, 'connector'), outline = '', fill = '' )
        self.connectors.append( h )
        # Main body rectangle.
        h = drawing.create_rectangle(self.translate([38.0, 38.0, 209.0, 127.0]), tags = self.tag, stipple = '', width = 1, outline = 'black', fill = 'cyan')
        self.gf5 = GraphicalForm(drawing, h, "gf5")
        self.graphForms.append(self.gf5)
        # Entity name caption centred in the rectangle.
        font = tkFont.Font( family='Arial', size=12, weight='normal', slant='roman', underline=0)
        h = drawing.create_text(self.translate([116.0, 58.0, 116.0, 12.0])[:2], tags = self.tag, font=font, fill = 'black', anchor = 'center', text = 'MT_post__Model_T', width = '0', justify= 'left', stipple='' )
        self.gf38 = GraphicalForm(drawing, h, 'gf38', fontObject=font)
        self.graphForms.append(self.gf38)
        # Graph-grammar label attribute display, offset above the shape.
        helv12 = tkFont.Font ( family="Helvetica", size=12, weight="bold" )
        h = drawing.create_text(self.translate([-3, -3]), font=helv12,
                                tags = (self.tag, self.semanticObject.getClass()),
                                fill = "black",
                                text=self.semanticObject.MT_label__.toString())
        self.attr_display["MT_label__"] = h
        self.gf_label = GraphicalForm(drawing, h, 'gf_label', fontObject=helv12)
        self.graphForms.append(self.gf_label)

    def postCondition( self, actionID, * params):
        # Generated hook: no post-condition for this entity.
        return None

    def preCondition( self, actionID, * params):
        # Generated hook: no pre-condition for this entity.
        return None

    def getImageDict( self ):
        # Generated hook: this entity uses no images.
        imageDict = dict()
        return imageDict

# Alias used by the ATOM3 loader to locate the generated class.
new_class = graph_MT_post__Model_T
|
ryan413/gotem
|
web_html_pdf_zip/html_to_pdf.py
|
import win32com.client.makepy
import win32com.client
import os
# Generate type library so that we can access Acrobat COM constants by name
# (e.g. win32com.client.constants.PDSaveFull used below).
win32com.client.makepy.GenerateFromTypeLibSpec('Acrobat')
# Use Unicode characters instead of their ascii psuedo-replacements
# NOTE(review): UNICODE_SNOB is never read in this file -- presumably a
# leftover html2text option; verify before removing.
UNICODE_SNOB = 0
def convertHTML2PDF(htmlPath, pdfPath):
    """Convert an HTML document to PDF via the Adobe Acrobat COM interface.

    Requires a local Acrobat installation; both paths are resolved to
    absolute paths before being handed to COM.
    """
    # Connect to Adobe Acrobat and open the HTML source.
    av_doc = win32com.client.Dispatch('AcroExch.AVDoc')
    av_doc.Open(os.path.abspath(htmlPath), 'html2pdf')
    # Save the underlying PD document in full-save PDF format.
    pd_doc = av_doc.GetPDDoc()
    pd_doc.Save(win32com.client.constants.PDSaveFull, os.path.abspath(pdfPath))
    pd_doc.Close()
    # True = close the viewer document without prompting to save.
    av_doc.Close(True)
def file_conversion():
    """Walk C:\\HTML and convert each "clean" HTML file to a PDF alongside it.

    A file qualifies when its name contains "clean" and its extension is
    .htm/.html (case-insensitive). The PDF keeps the base name with a
    '.pdf' extension.
    """
    directory = 'C:\\HTML'
    for path, dirnames, files in os.walk(directory):
        for filename in files:
            base, ext = os.path.splitext(filename)
            # Bug fix: the original tested `'.htm' in filename` but only
            # replaced '.html', so plain '.htm' files were "converted" to a
            # PDF that still carried the .htm name. Use the real extension
            # and always emit '<base>.pdf'.
            if "clean" in filename and ext.lower() in ('.htm', '.html'):
                pdf_path = os.path.join(path, base + '.pdf')
                html_path = os.path.join(path, filename)
                convertHTML2PDF(html_path, pdf_path)
|
rwl/godot
|
godot/common.py
|
#------------------------------------------------------------------------------
# Copyright (c) 2008 Richard W. Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#------------------------------------------------------------------------------
""" Defines traits used in more than on graph element.
"""
#------------------------------------------------------------------------------
# Imports:
#------------------------------------------------------------------------------
from enthought.traits.api import \
HasTraits, Color, Str, Enum, Float, Font, Any, Bool, Int, File, Trait, \
List, Tuple, ListStr, Property, Either, Regex
from enthought.enable.colors import ColorTrait
#------------------------------------------------------------------------------
# Syntactically concise alias trait:
#------------------------------------------------------------------------------
def Alias(name, **metadata):
    """Return a Property trait that forwards access to the trait *name*.

    Reads and writes on the alias are redirected to the named trait on
    the same object, e.g.::

        class MyClass(HasTraits):
            line_width = Float(3.0)
            thickness = Alias("line_width")
    """
    def _read(obj):
        return getattr(obj, name)

    def _write(obj, value):
        setattr(obj, name, value)

    return Property(_read, _write, **metadata)
#------------------------------------------------------------------------------
# Syntactically verbose alias trait:
#------------------------------------------------------------------------------
def _get_synced ( self, name ):
    # Read from the trait named by this trait's 'sync_to' metadata.
    return getattr( self, self.trait( name ).sync_to )

def _set_synced ( self, name, value ):
    # Forward writes to the trait named by this trait's 'sync_to' metadata.
    setattr( self, self.trait( name ).sync_to, value )

# Property trait that mirrors another trait declared via 'sync_to' metadata.
Synced = Property( _get_synced, _set_synced )
#------------------------------------------------------------------------------
# Trait definitions
#------------------------------------------------------------------------------

# An ID is one of the following:
# * Any string of alphabetic ([a-zA-Z\200-\377]) characters, underscores
# ('_') or digits ([0-9]), not beginning with a digit;
# * a number [-]?(.[0-9]+ | [0-9]+(.[0-9]*)? );
# * any double-quoted string ("...") possibly containing escaped
# quotes (\")1;
# * an HTML string (<...>).
# NOTE(review): the regex below is deliberately narrowed to [a-zA-Z] (the
# rest of the documented grammar is commented out) -- verify before relying
# on it for full Graphviz ID validation.
alphanum = "[a-zA-Z]"#\200-\377] "# | [0-9] "#| [_]"
number = "[-]?(.[0-9]+ | [0-9]+(.[0-9]*)? ) "
dquote = '\" '
html = "<...>"
id_trait = Regex(regex=alphanum+"|"+number+"|"+dquote+"|"+html)

pointf_trait = Tuple(Float, Float, desc="the point (x,y)", graphviz=True)
point_trait = pointf_trait #Either(
#    pointf_trait, Tuple(Float, Float, Float, desc="the point (x,y,z)")
#)

# Graphviz colour-scheme namespaces (X11 plus the ColorBrewer schemes).
color_schemes = ["X11", "Accent", "Blues", "BRBG", "BUGN", "BUPU", "Dark",
    "GUBU", "Greens", "Greys", "Oranges", "OORD", "Paired", "Pastel", "PIYG",
    "PRGN", "PUBU", "PUBUGN", "PUOR", "PURD", "Purples", "RDBU", "RDGY",
    "RDPU", "RDYLBU", "RDYLGN", "Reds", "Set", "Spectral", "YLGN",
    "YLGNBU", "YLORBR", "YLORRD"]

color_scheme_trait = Enum(color_schemes, desc="a color scheme namespace",
    label="Color scheme", graphviz=True)

color_trait = Color("black", desc="drawing color for graphics, not text",
    graphviz=True)

comment_trait = Str(desc="comments inserted into output", graphviz=True)

fontcolor_trait = Color("black", desc="color used for text",
    label="Font color", graphviz=True)

fontname_trait = Font("Times-Roman", desc="font used for text",
    label="Font name", graphviz=True)

fontsize_trait = Float(14.0, desc="size, in points, used for text",
    label="Font size", graphviz=True)

label_trait = Str(desc="text label attached to objects", graphviz=True)

# FIXME: Implement layerRange
#
# layerId or layerIdslayerId,
#   where layerId = "all", a decimal integer or a layer name. (An integer i
#   corresponds to layer i.) The string s consists of 1 or more separator
#   characters specified by the layersep attribute.
layer_trait = Str(desc="layers in which the node or edge is present",
    graphviz=True)

#margin_trait = Either(
#    Float, pointf_trait, desc="x and y margins of canvas or node label"
#)
margin_trait = Float(desc="x and y margins of canvas or node label",
    graphviz=True)

nojustify_trait = Bool(False, label="No justify",
    desc="multi-line labels will be justified in the context of itself",
    graphviz=True)

peripheries_trait = Int(desc="number of peripheries used in polygonal shapes "
    "and cluster boundaries", graphviz=True)

# FIXME: Implement splineType
#
# splineType
#   spline ( ';' spline )*
#   where spline = (endp)? (startp)? point (triple)+
#   and triple = point point point
#   and endp = "e,%d,%d"
#   and startp = "s,%d,%d"
# If a spline has points p1 p2 p3 ... pn, (n = 1 (mod 3)), the points
# correspond to the control points of a B-spline from p1 to pn. If startp is
# given, it touches one node of the edge, and the arrowhead goes from p1 to
# startp. If startp is not given, p1 touches a node. Similarly for pn and endp.
pos_trait = Tuple(Float, Float,
    desc="position of node, or spline control points", graphviz=True)

rectangle_trait = Tuple(Float, Float, Float, Float,
    desc="The rect llx,lly,urx,ury gives the coordinates, in points, of the "
    "lower-left corner (llx,lly) and the upper-right corner (urx,ury).",
    graphviz=True)

root_trait = Str(desc="nodes to be used as the center of the layout and the "
    "root of the generated spanning tree")

showboxes_trait = Trait("beginning", {"beginning": 1, "end": 2},
    label="Show boxes", desc="guide boxes in PostScript output",
    graphviz=True)

# Additional styles are available in device-dependent form. Style lists are
# passed to device drivers, which can use this to generate appropriate output.
edge_styles = ["dashed", "dotted", "solid", "invis", "bold"]
cluster_styles = ["filled", "rounded"]
node_styles = edge_styles + cluster_styles + ["diagonals"]

target_trait = Str(desc="if the object has a URL, this attribute determines "
    "which window of the browser is used", graphviz=True)

tooltip_trait = Str(desc="tooltip annotation attached to the node or edge",
    graphviz=True)

url_trait = Str(desc="hyperlinks incorporated into device-dependent output",
    graphviz=True)

# EOF -------------------------------------------------------------------------
|
scottbecker/autolims
|
autolims/models.py
|
from __future__ import unicode_literals
from django.db import models
from django.contrib.auth.models import User
from django.contrib.postgres.fields import JSONField
from django.utils.encoding import python_2_unicode_compatible
from transcriptic_tools import utils
from transcriptic_tools.utils import _CONTAINER_TYPES
from transcriptic_tools.enums import Temperature, CustomEnum
from django.core.exceptions import PermissionDenied
from autoprotocol import Unit
from transcriptic_tools.utils import round_volume
from db_file_storage.model_utils import delete_file, delete_file_if_needed
from helper_funcs import str_respresents_int
#create token imports
from django.db.models.signals import post_save
from django.dispatch import receiver
from rest_framework.authtoken.models import Token
from django.conf import settings
# Union of every cover type supported by any known container type.
COVER_TYPES = set()
for container_type in _CONTAINER_TYPES.values():
    if container_type.cover_types:
        COVER_TYPES.update(container_type.cover_types)
COVER_TYPES = list(COVER_TYPES)
# Allowed values for the corresponding model CharField choices below.
CONTAINER_STATUS_CHOICES = ['available','destroyed','returned','inbound','outbound','pending_destroy']
TEMPERATURE_NAMES = [temp.name for temp in Temperature]
RUN_STATUS_CHOICES = ['accepted','in_progress','complete','aborted','canceled']
ALIQUOT_EFFECT_TYPES = ['liquid_transfer_in','liquid_transfer_out','instructions']
DATA_TYPES = ['image_plate','platereader','measure']
RESOURCE_KINDS = ['Reagent','NucleicAcid']
# Primary key of the Organization assigned when none is specified.
DEFAULT_ORGANIZATION = 1
@python_2_unicode_compatible
class Organization(models.Model):
    """Tenant: a group of users sharing projects, containers and runs."""

    name = models.CharField(max_length=200,blank=True,
                            default='')
    # URL-visible unique identifier (see get_absolute_url).
    subdomain = models.CharField(max_length=200,
                                 unique=True,
                                 db_index=True)
    # NOTE(review): related_query_name is misspelled ('orgnization');
    # correcting it would change existing query syntax, so it is only
    # flagged here rather than fixed.
    users = models.ManyToManyField(User,
                                   related_name='organizations',
                                   related_query_name='orgnization',
                                   db_constraint=True)
    deleted_at = models.DateTimeField(null=True, blank=True)
    created_at = models.DateTimeField(auto_now_add=True)
    #custom fields
    updated_at = models.DateTimeField(auto_now=True)

    def get_absolute_url(self):
        # Organizations are addressed by subdomain at the site root.
        return "/%s/" % self.subdomain

    def __str__(self):
        return self.name if self.name else 'Organization %s'%self.id
@python_2_unicode_compatible
class Project(models.Model):
    """Grouping of runs owned by an Organization."""

    name = models.CharField(max_length=200,null=True,blank=True)
    # Biosafety level of the work performed under this project.
    bsl = models.IntegerField(default=1,blank=False,null=False)
    organization = models.ForeignKey(Organization, on_delete=models.CASCADE,
                                     related_name='projects',
                                     related_query_name='project',
                                     db_constraint=True,
                                     default=DEFAULT_ORGANIZATION
                                     )
    archived_at = models.DateTimeField(null=True, blank=True)
    created_at = models.DateTimeField(auto_now_add=True)
    #custom fields
    updated_at = models.DateTimeField(auto_now=True)

    def get_absolute_url(self):
        # Projects are addressed under their organization's subdomain.
        return "/%s/%s/runs"%(self.organization.subdomain,
                              self.id)

    def __str__(self):
        return self.name if self.name else 'Project %s'%self.id
class RunContainer(models.Model):
    """Through model linking a Run to the Containers it references,
    keyed by the container's local label within that run's protocol."""

    run = models.ForeignKey('Run', on_delete=models.CASCADE,
                            db_constraint=True,
                            related_name='run_containers',
                            related_query_name='run_container',
                            )
    container = models.ForeignKey('Container', on_delete=models.CASCADE,
                                  db_constraint=True,
                                  related_name='run_containers',
                                  related_query_name='run_container',
                                  )
    #the local label of the container within the run
    container_label = models.CharField(max_length=200)

    class Meta:
        # A given label may appear at most once per run.
        unique_together = ('run', 'container_label', )
@python_2_unicode_compatible
class Run(models.Model):
    """One execution of an autoprotocol protocol within a Project.

    On the first save the protocol's instructions and referenced containers
    are materialized (``create_instructions`` / ``populate_containers``);
    after that the protocol document is immutable (``save`` raises on edits).
    Python 2 codebase (``raise Exception, ...`` syntax, ``basestring``).
    """
    # Human-readable title; defaulted to 'Run <id>' right after first save.
    title = models.CharField(max_length=1000,null=True,blank=True)
    # Lifecycle state; restricted to RUN_STATUS_CHOICES (also asserted in save()).
    status = models.CharField(max_length=200,
                              choices=zip(RUN_STATUS_CHOICES,
                                          RUN_STATUS_CHOICES),
                              null=False,
                              default='accepted',
                              blank=False)
    # Test-mode runs propagate this flag to containers they generate.
    test_mode = models.BooleanField(blank=False,default=False)
    project = models.ForeignKey(Project, on_delete=models.CASCADE,
                                related_name='runs',
                                related_query_name='run',
                                db_constraint=True
                                )
    owner = models.ForeignKey(User)
    completed_at = models.DateTimeField(null=True, blank=True)
    canceled_at = models.DateTimeField(null=True, blank=True)
    aborted_at = models.DateTimeField(null=True, blank=True)
    started_at = models.DateTimeField(null=True, blank=True)
    created_at = models.DateTimeField(auto_now_add=True)
    flagged = models.BooleanField(default=False,null=False)
    # Arbitrary metadata; save() coerces non-dict values back to {}.
    properties = JSONField(null=True,blank=True,
                           default=dict)
    # The autoprotocol document; immutable once the run exists (see save()).
    protocol = JSONField(null=True,blank=True)
    #we don't know what issued means
    updated_at = models.DateTimeField(auto_now=True)
    # NOTE(review): null=True has no effect on a ManyToManyField in Django.
    containers = models.ManyToManyField('Container', related_name='runs',
                                        related_query_name='run',
                                        through='RunContainer',
                                        db_constraint=True,
                                        null=True,
                                        blank=True)
    def add_container(self, container_or_container_id, label):
        """Attach a container (instance or primary key) under ``label``."""
        if isinstance(container_or_container_id,Container):
            RunContainer.objects.create(run=self,
                                        container=container_or_container_id,
                                        container_label = label
                                        )
        else:
            RunContainer.objects.create(run=self,
                                        container_id=container_or_container_id,
                                        container_label = label
                                        )
    def remove_container(self,container_or_container_id):
        """Detach a container (instance or primary key) from this run."""
        if isinstance(container_or_container_id, Container):
            RunContainer.objects.filter(run=self,
                                        container=container_or_container_id).delete()
        else:
            RunContainer.objects.filter(run=self,
                                        container_id=container_or_container_id).delete()
    def save(self, *args, **kw):
        """Persist the run.

        Existing runs: reject any change to ``protocol``.  New runs:
        normalize Transcriptic resource ids first, then (after the insert,
        when an id exists) default the title and create the instructions
        and containers described by the protocol.
        """
        new_run = False
        if self.id is not None:
            orig_run = Run.objects.get(id=self.id)
            if orig_run.protocol != self.protocol:
                raise Exception, "unable to edit autoprotocol on a run"
            if not self.title:
                # NOTE(review): Run declares no 'name' field, so this only
                # sets a transient attribute; looks like it was meant to be
                # self.title -- confirm (the new-run path below sets both).
                self.name = 'Run %s'%self.id
        #new run
        else:
            new_run = True
            self.convert_transcriptic_resource_ids()
        if not isinstance(self.properties,dict):
            self.properties = {}
        assert self.status in RUN_STATUS_CHOICES,\
               'status \'%s\' not found in allowed options %s'%(self.status, str(RUN_STATUS_CHOICES))
        super(Run, self).save(*args, **kw)
        #only hit if this is a new Run
        if not self.title:
            # Second save is needed because the id is only known after insert.
            self.title = self.name = 'Run %s'%self.id
            super(Run, self).save(*args, **kw)
        if new_run:
            self.create_instructions()
            self.populate_containers()
    def convert_transcriptic_resource_ids(self):
        """Rewrite provision ops' Transcriptic resource ids to local pks.

        Skips values that are already ints (or int-like strings).
        """
        for operation in self.protocol['instructions']:
            if operation['op'] != 'provision': continue
            if not isinstance(operation['resource_id'], basestring) or \
               str_respresents_int(operation['resource_id']): continue
            resource = Resource.objects.get(transcriptic_id = operation['resource_id'])
            operation['resource_id'] = resource.id
    def create_instructions(self):
        """Create one Instruction row per protocol instruction, in order."""
        for i, instruction_dict in enumerate(self.protocol['instructions']):
            instruction = Instruction.objects.create(run = self,
                                                     operation = instruction_dict,
                                                     sequence_no = i)
    def populate_containers(self):
        """Create/link the containers referenced by the protocol's refs.

        'new' refs create a Container owned by this run's organization;
        existing refs are validated (not destroyed, same org) and linked.
        """
        organization = self.project.organization
        for label, ref_dict in self.protocol['refs'].items():
            if 'new' in ref_dict:
                storage_condition = ref_dict['store']['where'] if 'store' in ref_dict else None
                new_container = Container.objects.create(container_type_id = ref_dict['new'],
                                                         label = label,
                                                         test_mode = self.test_mode,
                                                         storage_condition = storage_condition,
                                                         status = 'available',
                                                         generated_by_run = self,
                                                         organization = organization
                                                         )
                self.add_container(new_container, label=label)
            else:
                #check that the existing container belongs to this org
                existing_container = Container.objects.get(id=ref_dict['id'])
                if existing_container.status == 'destroyed':
                    # NOTE(review): 'Destoryed' typo in the message below.
                    raise Exception('Destoryed container referenced in run: Container id %s'%existing_container.id)
                if existing_container.organization_id != self.project.organization_id:
                    raise PermissionDenied('Container %s doesn\'t belong to your org'%existing_container.id)
                self.add_container(existing_container, label=label)
    def __str__(self):
        return self.title
    def get_absolute_url(self):
        """URL of this run under its organization's subdomain."""
        return "/%s/%s/runs/%s"%(self.project.organization.subdomain,
                                 self.project_id, self.id)
    class Meta:
        index_together = [
            ['project','test_mode','status']
        ]
@python_2_unicode_compatible
class Container(models.Model):
    """A physical labware item (plate/tube) owned by an Organization.

    Geometry (row/column counts, well indexing) is delegated to the
    autoprotocol container-type registry ``_CONTAINER_TYPES`` keyed by
    ``container_type_id``.
    """
    #transcriptic fields
    container_type_id = models.CharField(max_length=200,
                                         choices=zip(_CONTAINER_TYPES.keys(),
                                                     _CONTAINER_TYPES.keys()))
    barcode = models.IntegerField(blank=True,null=True,unique=True, db_index=True)
    cover = models.CharField(max_length=200,
                             blank=True,
                             null=True,
                             choices=zip(COVER_TYPES,
                                         COVER_TYPES))
    test_mode = models.BooleanField(blank=False,default=False)
    label = models.CharField(max_length=1000,
                             blank=True,
                             default='',
                             db_index=True
                             )
    #location_id
    storage_condition = models.CharField(max_length=200,
                                         choices=zip(TEMPERATURE_NAMES,TEMPERATURE_NAMES),
                                         default=Temperature.ambient.name,
                                         null=True,
                                         blank=True)
    status = models.CharField(max_length=200,
                              choices=zip(CONTAINER_STATUS_CHOICES,
                                          CONTAINER_STATUS_CHOICES),
                              null=False,
                              default='available',
                              blank=False)
    expires_at = models.DateTimeField(null=True, blank=True)
    # Arbitrary metadata; save() coerces non-dict values back to {}.
    properties = JSONField(null=True,blank=True,
                           default=dict)
    # Set when this container was created by a run's 'new' ref.
    generated_by_run = models.ForeignKey(Run, on_delete=models.CASCADE,
                                         related_name='generated_containers',
                                         related_query_name='generated_container',
                                         db_constraint=True,
                                         null=True,
                                         blank=True
                                         )
    organization = models.ForeignKey(Organization, on_delete=models.CASCADE,
                                     related_name='containers',
                                     related_query_name='container',
                                     db_constraint=True
                                     )
    deleted_at = models.DateTimeField(null=True, blank=True)
    created_at = models.DateTimeField(auto_now_add=True)
    #custom fields
    updated_at = models.DateTimeField(auto_now=True)
    @classmethod
    def get_container_from_run_and_container_label(cls, run_id, container_label):
        """Look a container up via its RunContainer link for a given run."""
        return cls.objects.get(run_container__run_id = run_id,
                               run_container__container_label = container_label)
    @property
    def col_count(self):
        """Number of columns, per the autoprotocol container type."""
        container_type = _CONTAINER_TYPES[self.container_type_id]
        return container_type.col_count
    @property
    def row_count(self):
        """Number of rows, per the autoprotocol container type."""
        container_type = _CONTAINER_TYPES[self.container_type_id]
        return container_type.row_count()
    def well_indexes_from(self, start, num, columnwise=False):
        """
        Return a list of indexes belonging to this Container starting from
        the index indicated (in integer or string form) and including the
        number of proceeding wells specified. well indexes are counted from the
        starting well rowwise unless columnwise is True.
        Parameters
        ----------
        start : Well, int, str
            Starting well specified as a Well object, a human-readable well
            index or an integer well index.
        num : int
            Number of wells to include in the Wellgroup.
        columnwise : bool, optional
            Specifies whether the wells included should be counted columnwise
            instead of the default rowwise.
        """
        container_type = _CONTAINER_TYPES[self.container_type_id]
        start = container_type.robotize(start)
        if columnwise:
            # Re-base the rowwise index into the columnwise numbering.
            row, col = container_type.decompose(start)
            num_rows = self.row_count
            start = col * num_rows + row
        return range(start,start + num)
    def get_absolute_url(self):
        """URL of this container under its organization's subdomain."""
        return "/%s/containers/%s"%(self.organization.subdomain,
                                    self.id)
    def get_column_well_indexes(self, column_index_or_indexes):
        """Return well indexes for one column (int) or several (list of ints).

        Raises ValueError when a column index is out of range.
        """
        if isinstance(column_index_or_indexes,list):
            result = []
            for column_index in column_index_or_indexes:
                # Bug fix: previously called the nonexistent
                # self.get_column_wells(self, column_index); recurse into
                # this method for each column instead.
                result += self.get_column_well_indexes(column_index)
            return result
        column_index = column_index_or_indexes
        num_cols = self.col_count
        num_rows = self.row_count
        if column_index >= num_cols:
            raise ValueError('column index %s is too high, only %s cols in this container'%(column_index,num_cols))
        start = num_rows*column_index
        return self.all_well_indexes(columnwise=True)[start:start+num_rows]
    def all_well_indexes(self, columnwise=False):
        """
        Return a list of indexes representing all Wells belonging to this Container.
        Parameters
        ----------
        columnwise : bool, optional
            returns the WellGroup columnwise instead of rowwise (ordered by
            well index).
        """
        if columnwise:
            num_cols = self.col_count
            num_rows = self.row_count
            return [row * num_cols + col
                    for col in xrange(num_cols)
                    for row in xrange(num_rows)]
        else:
            return range(0,self.col_count*self.row_count)
    def save(self, *args, **kwargs):
        """Normalize empty-string form values to None, then save.

        Also enforces that a generating run's project belongs to the same
        organization as this container.
        """
        if self.barcode == '':
            self.barcode = None
        if self.expires_at == '':
            self.expires_at = None
        if self.generated_by_run == '':
            self.generated_by_run = None
        if self.generated_by_run_id:
            #check that the project of the generated run and the current org are the same
            assert self.generated_by_run.project.organization_id == self.organization_id, "Can't use a container from one org in another org's run"
        if not isinstance(self.properties,dict):
            self.properties = {}
        super(Container, self).save(*args, **kwargs)
    def __str__(self):
        return '%s (%s)'%(self.label,self.id) if self.label else 'Container %s'%self.id
@python_2_unicode_compatible
class Aliquot(models.Model):
    """The contents of one well of a Container.

    Volume is stored as a string of microliters to avoid float precision
    loss; arithmetic goes through ``Unit`` / ``round_volume``.
    """
    name = models.CharField(max_length=200,null=True,blank=True)
    container = models.ForeignKey(Container, on_delete=models.CASCADE,
                                  related_name='aliquots',
                                  related_query_name='aliquot',
                                  db_constraint=True
                                  )
    # Zero-based well index within the container (rowwise numbering).
    well_idx = models.IntegerField(default=0,blank=False, null=False)
    #this is a string to keep precision
    volume_ul = models.CharField(max_length=200,null=False,default='0',blank=False)
    properties = JSONField(null=True,blank=True,
                           default=dict)
    #resource
    #lot_no
    deleted_at = models.DateTimeField(null=True, blank=True)
    created_at = models.DateTimeField(auto_now_add=True)
    #custom fields
    updated_at = models.DateTimeField(auto_now=True)
    @property
    def human_index(self):
        """Human-readable well name (e.g. 'A1') via the container type."""
        container_type = _CONTAINER_TYPES[self.container.container_type_id]
        return container_type.humanize(self.well_idx)
    def add_volume(self, volume_to_add):
        """
        Add volume to this aliquot and return the (rounded) amount added.

        Handles volume strings, e.g. '5:nanoliter'; bare numbers/strings
        are treated as microliters.  Does not save the model.
        """
        current_volume = Unit(self.volume_ul,'microliter')
        if isinstance(volume_to_add,basestring) and ':' in volume_to_add:
            added_volume = Unit(volume_to_add)
        else:
            added_volume = Unit(volume_to_add,'microliter')
        added_volume = round_volume(added_volume,2)
        # instruments have at most 0.01 uL precision (2 decimal places)
        new_volume = round_volume(current_volume+added_volume,2)
        self.volume_ul = str(new_volume.to('microliter').magnitude)
        return added_volume
    def subtract_volume(self, volume_to_add):
        """
        Subtract volume from this aliquot (delegates to add_volume with a
        negated amount).  Handles volume strings, e.g. '5:nanoliter'.
        """
        # NOTE(review): parameter is named volume_to_add but is subtracted.
        current_volume = Unit(self.volume_ul,'microliter')
        if isinstance(volume_to_add,basestring) and ':' in volume_to_add:
            subtracted_volume = Unit(volume_to_add)
        else:
            subtracted_volume = Unit(volume_to_add,'microliter')
        return self.add_volume(-1*subtracted_volume)
    def save(self,*args, **kwargs):
        """Coerce malformed properties back to {} before saving."""
        if not isinstance(self.properties,dict):
            self.properties = {}
        super(Aliquot, self).save(*args, **kwargs)
    def __str__(self):
        return '%s/%s'%(self.container.label,self.well_idx)
@python_2_unicode_compatible
class Instruction(models.Model):
    """One step of a Run's autoprotocol, ordered by ``sequence_no``.

    Rows are created by ``Run.create_instructions`` from the protocol's
    'instructions' list; ``operation`` holds the raw instruction dict.
    """
    run = models.ForeignKey(Run,
                            on_delete=models.CASCADE,
                            related_name='instructions',
                            related_query_name='instruction',
                            db_constraint=True)
    # Raw autoprotocol instruction dict (the 'op' plus its arguments).
    operation = JSONField(blank=True,null=True)
    # Position within the run; unique per run (see Meta).
    sequence_no = models.IntegerField(null=False,blank=False,
                                      default=0)
    started_at = models.DateTimeField(null=True, blank=True)
    completed_at = models.DateTimeField(null=True, blank=True)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    class Meta:
        unique_together = ('run', 'sequence_no',)
    def __str__(self):
        return 'Instruction %s'%self.id
class DataImage(models.Model):
    """Database-backed image blob storage.

    Presumably the target table for Data.image's upload_to path
    ('autolims.DataImage/bytes/filename/mimetype') -- the
    django-db-file-storage convention; confirm against settings.
    """
    bytes = models.TextField()
    filename = models.CharField(max_length=255)
    mimetype = models.CharField(max_length=50)
class DataFile(models.Model):
    """Database-backed file blob storage.

    Presumably the target table for Data.file's upload_to path
    ('autolims.DataFile/bytes/filename/mimetype') -- the
    django-db-file-storage convention; confirm against settings.
    """
    bytes = models.TextField()
    filename = models.CharField(max_length=255)
    mimetype = models.CharField(max_length=50)
@python_2_unicode_compatible
class Data(models.Model):
    """A data artifact (image, file, or JSON) produced by a run/instruction.

    May be attached to a Run, to one of its Instructions, or both; save()
    enforces that an attached instruction belongs to the attached run.
    """
    name = models.CharField(max_length=200,null=True)
    # NOTE(review): default 'available' looks copy-pasted from a status
    # field and may not be a member of DATA_TYPES -- confirm.
    data_type = models.CharField(max_length=200,
                                 choices=zip(DATA_TYPES,
                                             DATA_TYPES),
                                 null=False,
                                 default='available',
                                 blank=False)
    # Position within the run; unique per run (see Meta).
    sequence_no = models.IntegerField(null=False,blank=False,
                                      default=0)
    #upload_to isn't used but is required
    image = models.ImageField(upload_to='autolims.DataImage/bytes/filename/mimetype', null=True, blank=True)
    file = models.FileField(upload_to='autolims.DataFile/bytes/filename/mimetype', null=True, blank=True)
    json = JSONField(null=True,blank=True)
    instruction = models.ForeignKey(Instruction,
                                    on_delete=models.CASCADE,
                                    related_name='data',
                                    related_query_name='data',
                                    db_constraint=True,
                                    null=True,
                                    blank=True)
    run = models.ForeignKey(Run, on_delete=models.CASCADE,
                            related_name='data',
                            related_query_name='data',
                            db_constraint=True,
                            null=True,
                            blank=True
                            )
    class Meta:
        unique_together = ('run', 'sequence_no',)
        verbose_name_plural = "data"
    def save(self, *args, **kwargs):
        """Save after validating run/instruction consistency.

        Afterwards clean up replaced file/image blobs (delete_file_if_needed).
        """
        if self.run and self.instruction and self.run_id != self.instruction.run_id:
            raise Exception, "Instruction must belong to the run of this data object"
        super(Data, self).save(*args, **kwargs)
        delete_file_if_needed(self, 'file')
        delete_file_if_needed(self, 'image')
    def delete(self, *args, **kwargs):
        """Delete the row, then remove its stored file/image blobs."""
        super(Data, self).delete(*args, **kwargs)
        delete_file(self, 'file')
        delete_file(self, 'image')
    def __str__(self):
        return "Data %s"%self.id
@python_2_unicode_compatible
class AliquotEffect(models.Model):
    """A recorded effect of an instruction on one aliquot.

    #visible in network console as aliquot_effects when loading a well at transcriptic
    """
    aliquot = models.ForeignKey(Aliquot,
                                on_delete=models.CASCADE,
                                related_name='aliquot_effects',
                                related_query_name='aliquot_effect',
                                db_constraint=True)
    instruction = models.ForeignKey(Instruction, on_delete=models.CASCADE,
                                    related_name='aliquot_effects',
                                    related_query_name='aliquot_effect',
                                    db_constraint=True)
    # Free-form details of the effect (e.g. what was transferred).
    data = JSONField(blank=True,null=True)
    # Effect kind; defaults to the first entry of ALIQUOT_EFFECT_TYPES.
    type = models.CharField(max_length=200,
                            choices=zip(ALIQUOT_EFFECT_TYPES,
                                        ALIQUOT_EFFECT_TYPES),
                            null=False,
                            default=ALIQUOT_EFFECT_TYPES[0],
                            blank=False)
    deleted_at = models.DateTimeField(null=True, blank=True)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    def __str__(self):
        return 'Aliquot Effect %s'%self.id
@python_2_unicode_compatible
class Resource(models.Model):
    """A provisionable reagent/material, optionally mapped to a
    Transcriptic resource id (see Run.convert_transcriptic_resource_ids).
    """
    name = models.CharField(max_length=200,blank=True,
                            default='')
    description = models.TextField(blank=True,null=True)
    storage_condition = models.CharField(max_length=200,
                                         choices=zip(TEMPERATURE_NAMES,TEMPERATURE_NAMES),
                                         default=Temperature.ambient.name,
                                         null=True,
                                         blank=True)
    # List of sensitivity tags; save() coerces non-list values back to [].
    sensitivities = JSONField(null=True,blank=True,
                              default=list)
    # Arbitrary metadata dict; save() coerces non-dict values back to {}.
    properties = JSONField(null=True,blank=True,
                           default=dict)
    # NOTE(review): choices/default look copy-pasted from a run-status
    # field (RUN_STATUS_CHOICES, 'available'); a resource 'kind' probably
    # wants its own choice list -- confirm before changing.
    kind = models.CharField(max_length=200,
                            choices=zip(RUN_STATUS_CHOICES,
                                        RUN_STATUS_CHOICES),
                            null=False,
                            default='available',
                            blank=False)
    # Empty strings are normalized to NULL in save() so the unique
    # constraint ignores resources without a Transcriptic id.
    transcriptic_id = models.CharField(max_length=200,blank=True,null=True,
                                       default='', db_index=True,
                                       unique=True)
    deleted_at = models.DateTimeField(null=True, blank=True)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    def __str__(self):
        return self.name if self.name else 'Resource %s'%self.id
    def save(self, *args, **kwargs):
        """Normalize fields before saving (see field comments above)."""
        if self.transcriptic_id == '':
            self.transcriptic_id = None
        if not isinstance(self.sensitivities,list):
            self.sensitivities = []
        if not isinstance(self.properties,dict):
            # Bug fix: previously reset properties to [] even though it is
            # a dict field (default=dict); use {} like Container/Aliquot.
            self.properties = {}
        super(Resource, self).save(*args, **kwargs)
#@python_2_unicode_compatible
#class Kit(models.Model):
#https://secure.transcriptic.com/_commercial/kits?format=json
#@python_2_unicode_compatible
#class KitItem(models.Model):
#https://secure.transcriptic.com/_commercial/kits/kit19jybkyf8ddv/kit_items?format=json
# This code is triggered whenever a new user has been created and saved to the database
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
def create_auth_token(sender, instance=None, created=False, **kwargs):
    """post_save hook: create a DRF auth Token for each newly created user."""
    if created:
        Token.objects.create(user=instance)
|
kelvinxu/representation-learning
|
generate_dataset.py
|
import numpy
from os import listdir
from os.path import isfile, join
import h5py
import numpy
from scipy import misc
rng = numpy.random.RandomState(123522)
path = '/data/lisatmp3/xukelvin/'
if __name__ == "__main__":
files = [f for f in listdir(join('train'))
if isfile(join('train', f))]
# Shuffle examples around
rng.shuffle(files)
# Create HDF5 file
# train
print "Processing Train"
train_f = h5py.File(path+'dogs_vs_cats_train.hdf5', 'w')
dt = h5py.special_dtype(vlen=numpy.dtype('uint8'))
features = train_f.create_dataset('images', (20000,), dtype=dt)
shapes = train_f.create_dataset('shapes', (20000, 3), dtype='uint16')
targets = train_f.create_dataset('labels', (20000,), dtype='uint8')
for i in xrange(0,20000):
f = files[i]
image = misc.imread(join('train', f))
target = 0 if 'cat' in f else 1
features[i] = image.flatten()
targets[i] = target
shapes[i] = image.shape
print '{:.0%}\r'.format(i / 20000.),
# val
print "Processing Validation"
val_f = h5py.File(path+'dogs_vs_cats_val.hdf5', 'w')
dt = h5py.special_dtype(vlen=numpy.dtype('uint8'))
features = val_f.create_dataset('images', (2500,), dtype=dt)
shapes = val_f.create_dataset('shapes', (2500, 3), dtype='uint16')
targets = val_f.create_dataset('labels', (2500,), dtype='uint8')
for i in xrange(20000,22500):
f = files[i]
image = misc.imread(join('train', f))
target = 0 if 'cat' in f else 1
features[i-20000] = image.flatten()
targets[i-20000] = target
shapes[i-20000] = image.shape
print '{:.0%}\r'.format(i / 2500.),
# test
print "Processing Test"
test_f = h5py.File(path+'dogs_vs_cats_test.hdf5', 'w')
dt = h5py.special_dtype(vlen=numpy.dtype('uint8'))
features = test_f.create_dataset('images', (2500,), dtype=dt)
shapes = test_f.create_dataset('shapes', (2500, 3), dtype='uint16')
targets = test_f.create_dataset('labels', (2500,), dtype='uint8')
for i in xrange(22500,25000):
f = files[i]
image = misc.imread(join('train', f))
target = 0 if 'cat' in f else 1
features[i-22500] = image.flatten()
targets[i-22500] = target
shapes[i-22500] = image.shape
print '{:.0%}\r'.format(i / 2500.),
|
perna/podigger
|
migrations/versions/dde8a74cfffa_.py
|
"""empty message
Revision ID: dde8a74cfffa
Revises: 0c2841d4cfcd
Create Date: 2016-07-29 17:42:17.142867
"""
# revision identifiers, used by Alembic.
revision = 'dde8a74cfffa'  # this migration's id
down_revision = '0c2841d4cfcd'  # parent revision in the migration chain
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Apply this revision: add a nullable 5-char ``language`` column
    to the ``podcast`` table."""
    language_column = sa.Column('language', sa.String(length=5), nullable=True)
    op.add_column('podcast', language_column)
def downgrade():
    """Revert this revision: drop the ``language`` column from ``podcast``."""
    op.drop_column('podcast', 'language')
|
timlapluie/gldsprnt
|
classes/pre_game/pre_game.py
|
# -*- coding: utf-8 -*-
# !/usr/bin/python
import pygame
from classes.pre_game.pre_game_item import PreGameItem
class PreGame():
    """Pre-game name-entry screen.

    Shows one text input per player.  RETURN confirms (only when every
    name is filled in), ESCAPE cancels, UP/DOWN move the focus.
    """

    def __init__(self, screen, actions, players):
        # Remember the target surface and its dimensions.
        self.screen = screen
        surface_rect = self.screen.get_rect()
        self.screen_width = surface_rect.width
        self.screen_height = surface_rect.height
        # Callbacks: 'success' confirms the screen, 'cancel' leaves it.
        self.actions = actions
        # Player names already known to the game.
        self.recent_players = players
        # Set by validate_names() while at least one input is empty.
        self.validation_error = False
        # One input row per player, stacked vertically.
        self.pre_game_items = [
            PreGameItem(self.screen, {'description': 'Spieler 1:'},
                        0, 0, 'bike-red-c'),
            PreGameItem(self.screen, {'description': 'Spieler 2:'},
                        0, self.screen_height / 2, 'bike-blue-c'),
        ]
        # Focus starts on the first input.
        self.active_input = 0
        self.pre_game_items[self.active_input].activate_input()
        self.active_color = (255, 134, 48)
        self.active_cursor = '_'

    def update(self, deltat):
        """Re-validate names and refresh every item, marking the focused one."""
        self.validate_names()
        for item in self.pre_game_items:
            item.deactivate()
            item.update(deltat)
        self.pre_game_items[self.active_input].activate()

    def render(self, deltat):
        """Draw all input rows."""
        for item in self.pre_game_items:
            item.render(deltat)

    def handle_keypress(self, event):
        """Route a pygame key event to the focused input or a screen action."""
        focused = self.pre_game_items[self.active_input]
        if event.unicode.isalpha():
            focused.append_key(event.unicode)
        elif event.key == pygame.K_BACKSPACE:
            focused.delete_last_char()
        elif event.key == pygame.K_RETURN:
            # Only confirm once every name has been entered.
            if not self.validation_error:
                self.actions['success']()
        elif event.key == pygame.K_ESCAPE:
            self.actions['cancel']()
        elif event.key == pygame.K_DOWN:
            self.increment_active_item()
        elif event.key == pygame.K_UP:
            self.decrement_active_item()

    def set_active_item(self, input_number):
        """Focus the input at ``input_number`` (no activation side effects)."""
        self.active_input = input_number

    def increment_active_item(self):
        """Move focus to the next input, wrapping around at the end."""
        self._shift_focus(1)

    def decrement_active_item(self):
        """Move focus to the previous input, wrapping around at the start."""
        self._shift_focus(-1)

    def _shift_focus(self, step):
        # Deactivate the old input, step the index modulo the item count
        # (wraps in both directions), then activate the new input.
        items = self.pre_game_items
        items[self.active_input].deactivate_input()
        self.active_input = (self.active_input + step) % len(items)
        items[self.active_input].activate_input()

    def validate_names(self):
        """Flag a validation error while any name field is still empty."""
        self.validation_error = any(
            item.input_text == '' for item in self.pre_game_items)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.