code
stringlengths 1
199k
|
|---|
from . uuid64 import *
|
import os
import json
def get_CMTs_in_file(aFile):
    '''
    Gets a list of the CMTs found in a file.

    Parameters
    ----------
    aFile : string, required
      The path to a file to read.

    Returns
    -------
    A list of CMTs found in a file.
    '''
    found = []
    for text in read_paramfile(aFile):
        # Any line mentioning 'CMT' contributes its 5-character key,
        # e.g. 'CMT04'.
        idx = text.find('CMT')
        if idx >= 0:
            found.append(text[idx:idx + 5])
    return found
def find_cmt_start_idx(data, cmtkey):
    '''
    Finds the starting index for a CMT data block in a list of lines.

    Parameters
    ----------
    data : [str, str, ...]
      A list of strings (maybe from a parameter file)
    cmtkey : str
      A CMT code string like 'CMT05' to search for in the list.

    Returns
    -------
    i : int
      The first index in the list where the CMT key is found. If key is not
      found returns None.
    '''
    needle = cmtkey.upper()
    for idx, text in enumerate(data):
        if needle in text:
            return idx
    # Fell through the whole list without a match.
    return None
def read_paramfile(thefile):
    '''
    Opens and reads a file, returning the data as a list of lines (with
    newlines).

    Parameters
    ----------
    thefile : str
      A path to a file to open and read.

    Returns
    -------
    d : [str, str, str, ...]
      A list of strings (with newlines at the end of each string).
    '''
    with open(thefile, 'r') as handle:
        return handle.readlines()
def get_CMT_datablock(afile, cmtnum):
    '''
    Search file, returns the first block of data for one CMT as a list of strings.

    Parameters
    ----------
    afile : str
      Path to a file to search.
    cmtnum : int
      The CMT number to search for. Converted (internally) to the CMT key.

    Returns
    -------
    d : [str, str, ...]
      A list of strings, one item for each line in the CMT's datablock.
      Each string will have a newline character in it.

    Raises
    ------
    RuntimeError
      If the CMT key is not present in the file. (Previously a missing key
      silently returned the entire file, because list[None:] is a full
      copy.)
    '''
    data = read_paramfile(afile)

    cmtkey = 'CMT%02i' % cmtnum

    startidx = find_cmt_start_idx(data, cmtkey)
    if startidx is None:
        raise RuntimeError("Can't find %s in file: %s" % (cmtkey, afile))

    end = None
    for i, line in enumerate(data[startidx:]):
        # i == 0 is this block's own header line (e.g. "// CMT07 // Heath
        # Tundra - ..."); any LATER line mentioning 'CMT' marks the start
        # of the next block and therefore the end of this one.
        if (i > 0) and "CMT" in line:
            end = startidx + i
            break

    # end stays None when this is the last block in the file, in which
    # case the slice runs to the end of the data.
    return data[startidx:end]
def detect_block_with_pft_info(cmtdatablock):
    '''Return True if the block's second line looks like a PFT header line.'''
    # A PFT header line carries many whitespace-separated PFT names;
    # nine or more tokens is taken to mean "this block has PFTs".
    # Perhaps should look at all lines??
    tokens = cmtdatablock[1].strip("//").split()
    return len(tokens) >= 9
def parse_header_line(datablock):
    '''Splits a header line into components: cmtkey, text name, comment.

    Assumes a CMT block header line looks like this:
    // CMT07 // Heath Tundra - (ma.....

    Returns
    -------
    (hdr_cmtkey, txtcmtname, hdrcomment) : (str, str, str)
    '''
    # Assume header is first line
    l0 = datablock[0]

    # Drop the leading comment marker, then split the remainder on the
    # second '//' into the key part and the "name - comment" part.
    header = l0.strip().strip("//").strip().split("//")
    hdr_cmtkey = header[0].strip()

    # partition (rather than split) fixes two problems with the old
    # split('-')[0]/[1] approach: a header with NO dash no longer raises
    # IndexError, and a comment containing further dashes is kept whole
    # instead of being truncated at the second dash.
    name_part, _, comment_part = header[1].partition('-')
    txtcmtname = name_part.strip()
    hdrcomment = comment_part.strip()

    return hdr_cmtkey, txtcmtname, hdrcomment
def get_pft_verbose_name(cmtkey=None, pftkey=None, cmtnum=None, pftnum=None):
    '''
    Look up a PFT's verbose (text) name in the cmt_calparbgc.txt parameter
    file. The CMT and PFT may each be given either as a key ('CMT04',
    'pft0') or a number, but not both forms at once.
    '''
    if cmtkey and cmtnum:
        raise ValueError("you must provide only one of you cmtkey or cmtnumber")
    if pftkey and pftnum:
        raise ValueError("you must provide only one of pftkey or pftnumber")

    # Normalize to a CMT number and a pft key.
    if cmtkey:  # convert to number
        cmtnum = int(cmtkey.lstrip('CMT'))
    if pftnum:  # convert to key
        pftkey = 'pft%i' % pftnum

    # The parameters/ directory sits beside this package's parent directory.
    here = os.path.dirname(os.path.realpath(__file__))
    path2params = os.path.join(os.path.split(here)[0], 'parameters/')

    data = get_CMT_datablock(os.path.join(path2params, 'cmt_calparbgc.txt'), cmtnum)
    dd = cmtdatablock2dict(data)

    return dd[pftkey.lower()]['name']
def cmtdatablock2dict(cmtdatablock):
    '''
    Converts a "CMT datablock" (list of strings) into a dict structure.

    Parameters
    ----------
    cmtdatablock : [str, str, ...]
      A list of strings (with new lines) holding parameter data for a CMT.

    Returns
    -------
    d : dict
      A multi-level dict mapping names (deduced from comments) to parameter
      values.
    '''
    cmtdict = {}

    pftblock = detect_block_with_pft_info(cmtdatablock)

    hdr_cmtkey, txtcmtname, hdrcomment = parse_header_line(cmtdatablock)
    cmtdict['tag'] = hdr_cmtkey
    cmtdict['cmtname'] = txtcmtname
    cmtdict['comment'] = hdrcomment

    if pftblock:
        # Look at the second line for something like this:
        # PFT name line, like: "//Decid. E.green ...."
        pftlist = cmtdatablock[1].strip("//").strip().split()
        pftnames = pftlist[0:10]
        for i, pftname in enumerate(pftnames):
            cmtdict['pft%i' % i] = {}
            cmtdict['pft%i' % i]['name'] = pftname

    for line in cmtdatablock:
        if not line.strip():
            continue  # blank line; carries no data (used to crash below)
        if line.strip()[0:2] == "//":
            continue  # Nothing to do...commented line

        # Normal data line: values before '//', then a 'tag: description'
        # comment after it.
        dline = line.strip().split("//")
        values = dline[0].split()
        comment = dline[1].strip().strip("//").split(':')[0]
        if len(values) >= 5:  # <--ARBITRARY! likely a pft data line?
            # Fresh index name so the pft-name loop variable above is not
            # shadowed (the old code reused 'i' for both loops).
            for vidx, value in enumerate(values):
                cmtdict['pft%i' % vidx][comment] = float(value)
        else:
            cmtdict[comment] = float(values[0])

    return cmtdict
def format_CMTdatadict(dd, refFile, format=None):
    '''
    Returns a formatted block of CMT data.

    Parameters
    ----------
    dd : dict
      Dictionary containing parameter names and values for a CMT.
    refFile : str
      A path to a file that should be used for reference in formatting the
      output.
    format : str (optional)
      A string specifying which format to return. Defaults to None.

    Returns
    -------
    ll : [str, str, ...]
      A list of strings.

    Raises
    ------
    NotImplementedError
      If a non-default format is requested. (Raising is friendlier to
      callers than the old print-and-exit(-1) behaviour.)
    '''
    if format is not None:
        raise NotImplementedError("NOT IMPLEMENTED YET!")

    ref_order = generate_reference_order(refFile)

    ll = []
    ll.append("// First line comment...")
    ll.append("// Second line comment (?? PFT string?)")

    def is_pft_var(v):
        '''True when v names a per-PFT variable rather than a CMT-level one.'''
        return v not in dd.keys() and v in dd['pft0'].keys()

    # Per-PFT variables first: one line per variable, one column per PFT.
    for var in ref_order:
        if is_pft_var(var):
            linestring = ''
            for pft in get_datablock_pftkeys(dd):
                linestring += "{:>12.6f} ".format(dd[pft][var])
            linestring += ('// %s: ' % var)
            ll.append(linestring)

    # Then the CMT-level (non-PFT) variables, one value per line.
    for var in ref_order:
        if not is_pft_var(var):
            ll.append('{:<12.5f} // comment??'.format(dd[var]))

    return ll
def generate_reference_order(aFile):
    '''
    Lists order that variables should be in in a parameter file based on CMT 0.

    Parameters
    ----------
    aFile: str
      The file to use as a base.

    Returns
    -------
    ref_order : [str, str, ...]
      A list of strings containing the variable names, parsed from the input
      file in the order they appear in the input file.
    '''
    db = get_CMT_datablock(aFile, 0)

    ref_order = []
    for line in db:
        t = comment_splitter(line)
        if t[0] == '':
            pass  # nothing before the comment, ignore this line - it has no data
        else:
            # t[0] has data, so the comment (t[1]) should hold a
            # 'tag: description' pair; the tag establishes the ordering.
            tokens = t[1].strip().lstrip("//").strip().split(":")
            tag = tokens[0]
            desc = "".join(tokens[1:])
            # print() form produces the same text as the old Python-2
            # print statement but is valid on Python 3 too.
            print("Found tag: {}  Desc:  {}".format(tag, desc))
            ref_order.append(tag)
    return ref_order
def comment_splitter(line):
    '''
    Splits a string into data before comment and after comment.

    The comment delimiter ('//') will be included in the after component.

    Parameters
    ----------
    line : str
      A string representing the line of data. May or may not contain the
      comment delimiter.

    Returns
    -------
    t : (str, str) - Tuple of strings.
      A tuple containing the "before comment" string, and the "after comment"
      string. The "after comment" string will include the comment character.
    '''
    marker = line.find("//")
    if marker >= 0:
        return (line[:marker], line[marker:])
    # No delimiter at all: everything is data, the comment part is empty.
    return (line, '')
def get_datablock_pftkeys(dd):
    '''
    Returns a sorted list of the pft keys present in a CMT data dictionary.

    Parameters
    ----------
    dd : dict
      A CMT data dictionary (as might be created from cmtdatablock2dict(..)).

    Returns
    -------
    A sorted list of the keys present in dd that contain the string 'pft'.
    '''
    pftkeys = [key for key in dd.keys() if 'pft' in key]
    pftkeys.sort()
    return pftkeys
def enforce_initvegc_split(aFile, cmtnum):
    '''
    Makes sure that the 'cpart' compartments variables match the proportions
    set in initvegc variables in a cmt_bgcvegetation.txt file.

    The initvegc(leaf, wood, root) variables in cmt_bgcvegetation.txt are the
    measured values from literature. The cpart(leaf, wood, root) variables,
    which are in the same file, should be set to the fractional make up of
    the components. So if the initvegc values for l, w, r are 100, 200, 300,
    then the cpart values should be 0.166, 0.33, and 0.5. It is very easy for
    these values to get out of sync when users manually update the parameter
    file.

    Parameters
    ----------
    aFile : str
      Path to a parameter file to work on. Must have bgcvegetation.txt in the
      name and must be a 'bgcvegetation' parameter file for this function to
      make sense and work.
    cmtnum : int
      The community number in the file to work on.

    Returns
    -------
    d : dict
      A CMT data dictionary with the updated cpart values.
    '''
    if 'bgcvegetation.txt' not in aFile:
        raise ValueError("This function only makes sense on cmt_bgcvegetation.txt files.")

    dd = cmtdatablock2dict(get_CMT_datablock(aFile, cmtnum))

    for pft in get_datablock_pftkeys(dd):
        pftd = dd[pft]
        sumC = pftd['initvegcl'] + pftd['initvegcw'] + pftd['initvegcr']
        if sumC > 0.0:
            # Normalize each compartment by the measured total.
            pftd['cpartl'] = pftd['initvegcl'] / sumC
            pftd['cpartw'] = pftd['initvegcw'] / sumC
            pftd['cpartr'] = pftd['initvegcr'] / sumC
        else:
            # No carbon at all: zero out the fractions rather than divide.
            pftd['cpartl'] = 0.0
            pftd['cpartw'] = 0.0
            pftd['cpartr'] = 0.0

    return dd
if __name__ == '__main__':
    # Ad-hoc smoke test. NOTE: this whole script uses Python 2 print
    # statements and must be run under Python 2.
    print "NOTE! Does not work correctly on non-PFT files yet!!"

    # Known parameter files, relative to the working directory.
    testFiles = [
        'parameters/cmt_calparbgc.txt',
        'parameters/cmt_bgcsoil.txt',
        'parameters/cmt_bgcvegetation.txt',
        'parameters/cmt_calparbgc.txt.backupsomeparams',
        'parameters/cmt_dimground.txt',
        'parameters/cmt_dimvegetation.txt',
        'parameters/cmt_envcanopy.txt',
        'parameters/cmt_envground.txt',
        'parameters/cmt_firepar.txt'
    ]

    # List every CMT key present in each known parameter file.
    for i in testFiles:
        print "{:>45s}: {}".format(i, get_CMTs_in_file(i))

    # for i in testFiles:
    #   print "{:>45s}".format(i)
    #   print "".join(get_CMT_datablock(i, 2))
    # print "{:45s}".format("DONE")

    # Dump one CMT datablock (CMT02 of cmt_dimground.txt), first raw and
    # then converted to a JSON structure.
    d = get_CMT_datablock(testFiles[4], 2)
    print "".join(d)
    print json.dumps(cmtdatablock2dict(d), sort_keys=True, indent=2)
    print "NOTE! Does not work correctly on non-PFT files yet!!"
|
"""
Scheduling utility methods and classes.
@author: Jp Calderone
"""
__metaclass__ = type
import time
from zope.interface import implements
from twisted.python import reflect
from twisted.python.failure import Failure
from twisted.internet import base, defer
from twisted.internet.interfaces import IReactorTime
class LoopingCall:
    """Call a function repeatedly.

    If C{f} returns a deferred, rescheduling will not take place until the
    deferred has fired. The result value is ignored.

    @ivar f: The function to call.
    @ivar a: A tuple of arguments to pass the function.
    @ivar kw: A dictionary of keyword arguments to pass to the function.
    @ivar clock: A provider of
        L{twisted.internet.interfaces.IReactorTime}.  The default is
        L{twisted.internet.reactor}. Feel free to set this to
        something else, but it probably ought to be set *before*
        calling L{start}.

    @type running: C{bool}
    @ivar running: A flag which is C{True} while C{f} is scheduled to be called
        (or is currently being called). It is set to C{True} when L{start} is
        called and set to C{False} when L{stop} is called or if C{f} raises an
        exception. In either case, it will be C{False} by the time the
        C{Deferred} returned by L{start} fires its callback or errback.

    @type _expectNextCallAt: C{float}
    @ivar _expectNextCallAt: The time at which this instance most recently
        scheduled itself to run.

    @type _realLastTime: C{float}
    @ivar _realLastTime: When counting skips, the time at which the skip
        counter was last invoked.

    @type _runAtStart: C{bool}
    @ivar _runAtStart: A flag indicating whether the 'now' argument was passed
        to L{LoopingCall.start}.
    """

    call = None
    running = False
    deferred = None
    interval = None
    _expectNextCallAt = 0.0
    _runAtStart = False
    starttime = None

    def __init__(self, f, *a, **kw):
        """
        Remember the callable and its arguments; default the clock to the
        global reactor (imported lazily so constructing a LoopingCall does
        not itself install a reactor).
        """
        self.f = f
        self.a = a
        self.kw = kw
        from twisted.internet import reactor
        self.clock = reactor


    def withCount(cls, countCallable):
        """
        An alternate constructor for L{LoopingCall} that makes available the
        number of calls which should have occurred since it was last invoked.

        Note that this number is an C{int} value; It represents the discrete
        number of calls that should have been made.  For example, if you are
        using a looping call to display an animation with discrete frames, this
        number would be the number of frames to advance.

        The count is normally 1, but can be higher. For example, if the reactor
        is blocked and takes too long to invoke the L{LoopingCall}, a Deferred
        returned from a previous call is not fired before an interval has
        elapsed, or if the callable itself blocks for longer than an interval,
        preventing I{itself} from being called.

        @param countCallable: A callable that will be invoked each time the
            resulting LoopingCall is run, with an integer specifying the number
            of calls that should have been invoked.

        @type countCallable: 1-argument callable which takes an C{int}

        @return: An instance of L{LoopingCall} with call counting enabled,
            which provides the count as the first positional argument.

        @rtype: L{LoopingCall}

        @since: 9.0
        """
        def counter():
            now = self.clock.seconds()
            lastTime = self._realLastTime
            if lastTime is None:
                # First invocation: measure from the loop's start time,
                # adjusted if the first call ran immediately at start().
                lastTime = self.starttime
                if self._runAtStart:
                    lastTime -= self.interval
            self._realLastTime = now
            lastInterval = self._intervalOf(lastTime)
            thisInterval = self._intervalOf(now)
            count = thisInterval - lastInterval
            return countCallable(count)

        self = cls(counter)
        self._realLastTime = None
        return self

    withCount = classmethod(withCount)


    def _intervalOf(self, t):
        """
        Determine the number of intervals passed as of the given point in
        time.

        @param t: The specified time (from the start of the L{LoopingCall}) to
            be measured in intervals

        @return: The C{int} number of intervals which have passed as of the
            given point in time.
        """
        elapsedTime = t - self.starttime
        intervalNum = int(elapsedTime / self.interval)
        return intervalNum


    def start(self, interval, now=True):
        """
        Start running function every interval seconds.

        @param interval: The number of seconds between calls.  May be
        less than one.  Precision will depend on the underlying
        platform, the available hardware, and the load on the system.

        @param now: If True, run this call right now.  Otherwise, wait
        until the interval has elapsed before beginning.

        @return: A Deferred whose callback will be invoked with
        C{self} when C{self.stop} is called, or whose errback will be
        invoked when the function raises an exception or returned a
        deferred that has its errback invoked.
        """
        assert not self.running, ("Tried to start an already running "
                                  "LoopingCall.")
        if interval < 0:
            # Parenthesized raise: identical behavior, but valid syntax on
            # Python 3 as well as Python 2 (unlike the old
            # 'raise ValueError, "..."' form).
            raise ValueError("interval must be >= 0")
        self.running = True
        d = self.deferred = defer.Deferred()
        self.starttime = self.clock.seconds()
        self._expectNextCallAt = self.starttime
        self.interval = interval
        self._runAtStart = now
        if now:
            self()
        else:
            self._reschedule()
        return d

    def stop(self):
        """Stop running function.
        """
        assert self.running, ("Tried to stop a LoopingCall that was "
                              "not running.")
        self.running = False
        if self.call is not None:
            self.call.cancel()
            self.call = None
        # Fire the start() Deferred to tell the caller the loop is done.
        d, self.deferred = self.deferred, None
        d.callback(self)

    def reset(self):
        """
        Skip the next iteration and reset the timer.

        @since: 11.1
        """
        assert self.running, ("Tried to reset a LoopingCall that was "
                              "not running.")
        if self.call is not None:
            self.call.cancel()
            self.call = None
            self._expectNextCallAt = self.clock.seconds()
            self._reschedule()

    def __call__(self):
        """
        Invoke the wrapped callable once, then reschedule (or complete the
        start() Deferred) depending on the outcome.
        """
        def cb(result):
            if self.running:
                self._reschedule()
            else:
                # stop() ran while f was executing; finish normally.
                d, self.deferred = self.deferred, None
                d.callback(self)

        def eb(failure):
            # An error from f permanently stops the loop.
            self.running = False
            d, self.deferred = self.deferred, None
            d.errback(failure)

        self.call = None
        d = defer.maybeDeferred(self.f, *self.a, **self.kw)
        d.addCallback(cb)
        d.addErrback(eb)

    def _reschedule(self):
        """
        Schedule the next iteration of this looping call.
        """
        if self.interval == 0:
            self.call = self.clock.callLater(0, self)
            return

        currentTime = self.clock.seconds()
        # Find how long is left until the interval comes around again.
        untilNextTime = (self._expectNextCallAt - currentTime) % self.interval

        # Make sure it is in the future, in case more than one interval worth
        # of time passed since the previous call was made.
        nextTime = max(
            self._expectNextCallAt + self.interval, currentTime + untilNextTime)

        # If the interval falls on the current time exactly, skip it and
        # schedule the call for the next interval.
        if nextTime == currentTime:
            nextTime += self.interval

        self._expectNextCallAt = nextTime
        self.call = self.clock.callLater(nextTime - currentTime, self)

    def __repr__(self):
        """
        Show the wrapped callable and its arguments for debugging.
        """
        if hasattr(self.f, 'func_name'):
            func = self.f.func_name
            if hasattr(self.f, 'im_class'):
                func = self.f.im_class.__name__ + '.' + func
        else:
            func = reflect.safe_repr(self.f)

        return 'LoopingCall<%r>(%s, *%s, **%s)' % (
            self.interval, func, reflect.safe_repr(self.a),
            reflect.safe_repr(self.kw))
class SchedulerError(Exception):
    """
    Base class for scheduler-state errors: an operation could not be
    completed because the scheduler or one of its tasks was in an invalid
    state.  Not raised directly; the various scheduler-state-related
    exceptions subclass it.
    """
class SchedulerStopped(SchedulerError):
    """
    Raised when an operation cannot complete because the scheduler was
    stopped while the operation was in progress, or had already been stopped.
    """
class TaskFinished(SchedulerError):
    """
    Raised when an operation cannot complete because the task already
    completed, was stopped, hit an error, or otherwise permanently stopped
    running.
    """
class TaskDone(TaskFinished):
    """
    Raised when an operation cannot complete because the task already ran to
    completion.
    """
class TaskStopped(TaskFinished):
    """
    Raised when an operation cannot complete because the task was stopped.
    """
class TaskFailed(TaskFinished):
    """
    Raised when an operation cannot complete because the task died with an
    unhandled error.
    """
class NotPaused(SchedulerError):
    """
    Raised when resuming a task that was not previously paused.
    """
class _Timer(object):
MAX_SLICE = 0.01
def __init__(self):
self.end = time.time() + self.MAX_SLICE
def __call__(self):
return time.time() >= self.end
_EPSILON = 0.00000001
def _defaultScheduler(x):
    """Schedule C{x} to run almost immediately on the global reactor."""
    # Imported lazily so merely importing this module does not require a
    # reactor to be installed.
    from twisted.internet import reactor
    return reactor.callLater(_EPSILON, x)
class CooperativeTask(object):
    """
    A L{CooperativeTask} is a task object inside a L{Cooperator}, which can be
    paused, resumed, and stopped.  It can also have its completion (or
    termination) monitored.

    @see: L{CooperativeTask.cooperate}

    @ivar _iterator: the iterator to iterate when this L{CooperativeTask} is
        asked to do work.

    @ivar _cooperator: the L{Cooperator} that this L{CooperativeTask}
        participates in, which is used to re-insert it upon resume.

    @ivar _deferreds: the list of L{defer.Deferred}s to fire when this task
        completes, fails, or finishes.

    @type _deferreds: L{list}

    @type _cooperator: L{Cooperator}

    @ivar _pauseCount: the number of times that this L{CooperativeTask} has
        been paused; if 0, it is running.

    @type _pauseCount: L{int}

    @ivar _completionState: The completion-state of this L{CooperativeTask}.
        C{None} if the task is not yet completed, an instance of L{TaskStopped}
        if C{stop} was called to stop this task early, of L{TaskFailed} if the
        application code in the iterator raised an exception which caused it to
        terminate, and of L{TaskDone} if it terminated normally via raising
        L{StopIteration}.

    @type _completionState: L{TaskFinished}
    """

    def __init__(self, iterator, cooperator):
        """
        A private constructor: to create a new L{CooperativeTask}, see
        L{Cooperator.cooperate}.
        """
        self._iterator = iterator
        self._cooperator = cooperator
        self._deferreds = []
        self._pauseCount = 0
        self._completionState = None
        self._completionResult = None
        # Registering with the cooperator is what makes the task runnable.
        cooperator._addTask(self)


    def whenDone(self):
        """
        Get a L{defer.Deferred} notification of when this task is complete.

        @return: a L{defer.Deferred} that fires with the C{iterator} that this
            L{CooperativeTask} was created with when the iterator has been
            exhausted (i.e. its C{next} method has raised L{StopIteration}), or
            fails with the exception raised by C{next} if it raises some other
            exception.

        @rtype: L{defer.Deferred}
        """
        d = defer.Deferred()
        if self._completionState is None:
            # Not finished yet: remember the Deferred for _completeWith.
            self._deferreds.append(d)
        else:
            # Already finished: fire immediately with the stored result.
            d.callback(self._completionResult)
        return d


    def pause(self):
        """
        Pause this L{CooperativeTask}.  Stop doing work until
        L{CooperativeTask.resume} is called.  If C{pause} is called more than
        once, C{resume} must be called an equal number of times to resume this
        task.

        @raise TaskFinished: if this task has already finished or completed.
        """
        self._checkFinish()
        self._pauseCount += 1
        if self._pauseCount == 1:
            # First pause: withdraw from the cooperator's run list.
            self._cooperator._removeTask(self)


    def resume(self):
        """
        Resume processing of a paused L{CooperativeTask}.

        @raise NotPaused: if this L{CooperativeTask} is not paused.
        """
        if self._pauseCount == 0:
            raise NotPaused()
        self._pauseCount -= 1
        # Only rejoin the run list when fully unpaused and not finished.
        if self._pauseCount == 0 and self._completionState is None:
            self._cooperator._addTask(self)


    def _completeWith(self, completionState, deferredResult):
        """
        @param completionState: a L{TaskFinished} exception or a subclass
            thereof, indicating what exception should be raised when subsequent
            operations are performed.

        @param deferredResult: the result to fire all the deferreds with.
        """
        self._completionState = completionState
        self._completionResult = deferredResult
        if not self._pauseCount:
            self._cooperator._removeTask(self)

        # The Deferreds need to be invoked after all this is completed, because
        # a Deferred may want to manipulate other tasks in a Cooperator.  For
        # example, if you call "stop()" on a cooperator in a callback on a
        # Deferred returned from whenDone(), this CooperativeTask must be gone
        # from the Cooperator by that point so that _completeWith is not
        # invoked reentrantly; that would cause these Deferreds to blow up with
        # an AlreadyCalledError, or the _removeTask to fail with a ValueError.
        for d in self._deferreds:
            d.callback(deferredResult)


    def stop(self):
        """
        Stop further processing of this task.

        @raise TaskFinished: if this L{CooperativeTask} has previously
            completed, via C{stop}, completion, or failure.
        """
        self._checkFinish()
        self._completeWith(TaskStopped(), Failure(TaskStopped()))


    def _checkFinish(self):
        """
        If this task has been stopped, raise the appropriate subclass of
        L{TaskFinished}.
        """
        if self._completionState is not None:
            raise self._completionState


    def _oneWorkUnit(self):
        """
        Perform one unit of work for this task, retrieving one item from its
        iterator, stopping if there are no further items in the iterator, and
        pausing if the result was a L{defer.Deferred}.
        """
        try:
            result = self._iterator.next()
        except StopIteration:
            self._completeWith(TaskDone(), self._iterator)
        except:
            # NOTE(review): bare except is deliberate here — any error raised
            # by application code in the iterator becomes a TaskFailed with
            # the captured Failure().
            self._completeWith(TaskFailed(), Failure())
        else:
            if isinstance(result, defer.Deferred):
                # Yielded a Deferred: pause until it fires, then resume (or
                # fail the task if it errbacks).
                self.pause()
                def failLater(f):
                    self._completeWith(TaskFailed(), f)
                result.addCallbacks(lambda result: self.resume(),
                                    failLater)
class Cooperator(object):
    """
    Cooperative task scheduler.
    """

    def __init__(self,
                 terminationPredicateFactory=_Timer,
                 scheduler=_defaultScheduler,
                 started=True):
        """
        Create a scheduler-like object to which iterators may be added.

        @param terminationPredicateFactory: A no-argument callable which will
            be invoked at the beginning of each step and should return a
            no-argument callable which will return True when the step should be
            terminated.  The default factory is time-based and allows iterators
            to run for 1/100th of a second at a time.

        @param scheduler: A one-argument callable which takes a no-argument
            callable and should invoke it at some future point.  This will be
            used to schedule each step of this Cooperator.

        @param started: A boolean which indicates whether iterators should be
            stepped as soon as they are added, or if they will be queued up
            until L{Cooperator.start} is called.
        """
        self._tasks = []
        # Iterator over self._tasks that survives across ticks, so work is
        # distributed fairly instead of restarting at task 0 each tick.
        self._metarator = iter(())
        self._terminationPredicateFactory = terminationPredicateFactory
        self._scheduler = scheduler
        self._delayedCall = None
        self._stopped = False
        self._started = started


    def coiterate(self, iterator, doneDeferred=None):
        """
        Add an iterator to the list of iterators this L{Cooperator} is
        currently running.

        @param doneDeferred: If specified, this will be the Deferred used as
            the completion deferred.  It is suggested that you use the default,
            which creates a new Deferred for you.

        @return: a Deferred that will fire when the iterator finishes.
        """
        if doneDeferred is None:
            doneDeferred = defer.Deferred()
        CooperativeTask(iterator, self).whenDone().chainDeferred(doneDeferred)
        return doneDeferred


    def cooperate(self, iterator):
        """
        Start running the given iterator as a long-running cooperative task, by
        calling next() on it as a periodic timed event.

        @param iterator: the iterator to invoke.

        @return: a L{CooperativeTask} object representing this task.
        """
        return CooperativeTask(iterator, self)


    def _addTask(self, task):
        """
        Add a L{CooperativeTask} object to this L{Cooperator}.
        """
        if self._stopped:
            self._tasks.append(task) # XXX silly, I know, but _completeWith
                                     # does the inverse
            task._completeWith(SchedulerStopped(), Failure(SchedulerStopped()))
        else:
            self._tasks.append(task)
            self._reschedule()


    def _removeTask(self, task):
        """
        Remove a L{CooperativeTask} from this L{Cooperator}.
        """
        self._tasks.remove(task)
        # If no work left to do, cancel the delayed call:
        if not self._tasks and self._delayedCall:
            self._delayedCall.cancel()
            self._delayedCall = None


    def _tasksWhileNotStopped(self):
        """
        Yield all L{CooperativeTask} objects in a loop as long as this
        L{Cooperator}'s termination condition has not been met.
        """
        terminator = self._terminationPredicateFactory()
        while self._tasks:
            for t in self._metarator:
                yield t
                if terminator():
                    # Time slice used up; resume from this position on the
                    # next tick (self._metarator keeps our place).
                    return
            self._metarator = iter(self._tasks)


    def _tick(self):
        """
        Run one scheduler tick.
        """
        self._delayedCall = None
        for taskObj in self._tasksWhileNotStopped():
            taskObj._oneWorkUnit()
        self._reschedule()


    # Set to True when _reschedule is called before start(); start() then
    # knows it must schedule the first tick itself.
    _mustScheduleOnStart = False
    def _reschedule(self):
        if not self._started:
            self._mustScheduleOnStart = True
            return
        if self._delayedCall is None and self._tasks:
            self._delayedCall = self._scheduler(self._tick)


    def start(self):
        """
        Begin scheduling steps.
        """
        self._stopped = False
        self._started = True
        if self._mustScheduleOnStart:
            # Reverts the attribute lookup to the class default (False).
            del self._mustScheduleOnStart
            self._reschedule()


    def stop(self):
        """
        Stop scheduling steps.  Errback the completion Deferreds of all
        iterators which have been added and forget about them.
        """
        self._stopped = True
        for taskObj in self._tasks:
            taskObj._completeWith(SchedulerStopped(),
                                  Failure(SchedulerStopped()))
        self._tasks = []
        if self._delayedCall is not None:
            self._delayedCall.cancel()
            self._delayedCall = None
# Module-global Cooperator shared by the top-level coiterate()/cooperate()
# convenience functions below.
_theCooperator = Cooperator()
def coiterate(iterator):
    """
    Cooperatively iterate over the given iterator, dividing runtime between it
    and all other iterators which have been passed to this function and not yet
    exhausted.

    @return: a Deferred that fires when the iterator finishes.
    """
    # Delegates to the shared module-level Cooperator.
    return _theCooperator.coiterate(iterator)
def cooperate(iterator):
    """
    Start running the given iterator as a long-running cooperative task, by
    calling next() on it as a periodic timed event.

    @param iterator: the iterator to invoke.

    @return: a L{CooperativeTask} object representing this task.
    """
    # Delegates to the shared module-level Cooperator.
    return _theCooperator.cooperate(iterator)
class Clock:
    """
    Provide a deterministic, easily-controlled implementation of
    L{IReactorTime.callLater}.  This is commonly useful for writing
    deterministic unit tests for code which schedules events using this API.
    """
    implements(IReactorTime)

    # The current simulated time, in seconds.
    rightNow = 0.0

    def __init__(self):
        # Pending DelayedCall objects, kept sorted by scheduled time.
        self.calls = []

    def seconds(self):
        """
        Pretend to be time.time().  This is used internally when an operation
        such as L{IDelayedCall.reset} needs to determine a a time value
        relative to the current time.

        @rtype: C{float}
        @return: The time which should be considered the current time.
        """
        return self.rightNow

    def _sortCalls(self):
        """
        Sort the pending calls according to the time they are scheduled.
        """
        # Sort with a key function instead of the old cmp-style comparator
        # (sort(lambda a, b: cmp(...))): identical ordering, but valid on
        # Python 3 (where cmp and comparator arguments were removed) as
        # well as Python 2.
        self.calls.sort(key=lambda call: call.getTime())

    def callLater(self, when, what, *a, **kw):
        """
        See L{twisted.internet.interfaces.IReactorTime.callLater}.
        """
        dc = base.DelayedCall(self.seconds() + when,
                              what, a, kw,
                              self.calls.remove,
                              lambda c: None,
                              self.seconds)
        self.calls.append(dc)
        self._sortCalls()
        return dc

    def getDelayedCalls(self):
        """
        See L{twisted.internet.interfaces.IReactorTime.getDelayedCalls}
        """
        return self.calls

    def advance(self, amount):
        """
        Move time on this clock forward by the given amount and run whatever
        pending calls should be run.

        @type amount: C{float}
        @param amount: The number of seconds which to advance this clock's
        time.
        """
        self.rightNow += amount
        self._sortCalls()
        # Run every call whose scheduled time has now been reached, in
        # order; a call may itself schedule more calls, hence the re-sort.
        while self.calls and self.calls[0].getTime() <= self.seconds():
            call = self.calls.pop(0)
            call.called = 1
            call.func(*call.args, **call.kw)
            self._sortCalls()

    def pump(self, timings):
        """
        Advance incrementally by the given set of times.

        @type timings: iterable of C{float}
        """
        for amount in timings:
            self.advance(amount)
def deferLater(clock, delay, callable, *args, **kw):
    """
    Call the given function after a certain period of time has passed.

    @type clock: L{IReactorTime} provider
    @param clock: The object which will be used to schedule the delayed
        call.

    @type delay: C{float} or C{int}
    @param delay: The number of seconds to wait before calling the function.

    @param callable: The object to call after the delay.

    @param *args: The positional arguments to pass to C{callable}.

    @param **kw: The keyword arguments to pass to C{callable}.

    @rtype: L{defer.Deferred}
    @return: A deferred that fires with the result of the callable when the
        specified time has elapsed.
    """
    def deferLaterCancel(deferred):
        # Cancelling the returned Deferred cancels the underlying
        # scheduled call.
        delayedCall.cancel()
    d = defer.Deferred(deferLaterCancel)
    d.addCallback(lambda ignored: callable(*args, **kw))
    # Fire the Deferred (with None) after the delay; the callback above
    # then invokes the user's callable with the stored arguments.
    delayedCall = clock.callLater(delay, d.callback, None)
    return d
# Public API of this module.
__all__ = [
    'LoopingCall',

    'Clock',

    'SchedulerStopped', 'Cooperator', 'coiterate',

    'deferLater',
    ]
|
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Initial schema for the groups app: a Group model, a GroupMember join
    # table, and a Group.members many-to-many field routed through it.

    initial = True

    dependencies = [
        # GroupMember references the (swappable) user model.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Group',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255, unique=True)),
                ('slug', models.SlugField(allow_unicode=True, unique=True)),
                ('description', models.TextField(blank=True, default='')),
                # Rendered copy of description; not editable in admin forms.
                ('description_html', models.TextField(blank=True, default='', editable=False)),
            ],
            options={
                'ordering': ['name'],
            },
        ),
        migrations.CreateModel(
            name='GroupMember',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('group', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='memberships', to='groups.Group')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='user_groups', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.AddField(
            model_name='group',
            name='members',
            field=models.ManyToManyField(through='groups.GroupMember', to=settings.AUTH_USER_MODEL),
        ),
        # Each user may appear in a given group at most once.
        migrations.AlterUniqueTogether(
            name='groupmember',
            unique_together=set([('group', 'user')]),
        ),
    ]
|
from network import WLAN
# Board configuration constants.
WLAN_MODE = 'off'  # Wi-Fi disabled

# LoRa settings; 'otaa' selects over-the-air activation.
LORA_MODE = 'otaa'
LORA_OTAA_EUI = '70B3D57EF0001ED4'  # device EUI (hex string)
LORA_OTAA_KEY = None  # See README.md for instructions!
# NOTE(review): presumably the send interval in seconds — confirm against
# the code that consumes it.
LORA_SEND_RATE = 180

# GNSS receiver wiring: UART port/baud and the enable GPIO pin.
GNSS_UART_PORT = 1
GNSS_UART_BAUD = 9600
GNSS_ENABLE_PIN = 'P8'
|
from django.dispatch import Signal
# Signals for email delivery events. Senders are expected to supply the
# keyword arguments noted beside each signal.
user_email_bounced = Signal()  # args: ['bounce', 'should_deactivate']
email_bounced = Signal()  # args: ['bounce', 'should_deactivate']
email_unsubscribed = Signal()  # args: ['email', 'reference']
|
# Read whitespace-separated integers from one line of stdin and print
# their sum.
numbers = [int(token) for token in input().split()]
print(sum(numbers))
|
from django.conf import settings
from django.conf.urls import static
from django.urls import include, path, re_path
from django.contrib import admin
# URL configuration: mount each ditto sub-app under its own prefix,
# with the core app at the site root.
urlpatterns = [
    path(r"admin/", admin.site.urls),
    path(r"flickr/", include("ditto.flickr.urls")),
    path(r"lastfm/", include("ditto.lastfm.urls")),
    path(r"pinboard/", include("ditto.pinboard.urls")),
    path(r"twitter/", include("ditto.twitter.urls")),
    path(r"", include("ditto.core.urls")),
]
if settings.DEBUG:
    # Only available in development builds; debug_toolbar need not be installed in prod.
    import debug_toolbar
    urlpatterns += [
        re_path(r"^__debug__/", include(debug_toolbar.urls)),
    ]
# Serve media/static through Django in DEBUG; static.static() is a no-op when DEBUG is False.
urlpatterns += static.static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
urlpatterns += static.static(
    settings.STATIC_URL, document_root=settings.STATIC_ROOT
)
|
from os import listdir
import os
import re
import sys
from argparse import ArgumentParser
import random
import subprocess
from math import sqrt
import ast
from adderror import adderror
"""ENSAMBLE, -d directory -n number of models """
"""-k number of selected structure"""
"""-r repet of program"""
files = []
pdb_files = []
exp_file = []
list_of_random_items_modified = []
list_of_random_items = []
selected_files_for_ensamble = []
def argument():
    """Parse the command line, populate the module-level ``files`` listing
    and pre-size ``list_of_random_items_modified``, and return the args."""
    parser = ArgumentParser()
    parser.add_argument("-d", "--dir", dest="myDirVariable",
                        help="Choose dir", metavar="DIR", required=True)
    parser.add_argument("-n", metavar='N', type=int,
                        dest="number_of_selected_files",
                        help="Number of selected structure",
                        required=True)
    parser.add_argument("-k", metavar='K', type=int,
                        dest="k_number_of_options",
                        help="Number of possibility structure, less then selected files",
                        required=True)
    parser.add_argument("-q", metavar='Q', type=int,
                        dest="mixing_koeficient", help="Mixing koeficient",
                        default=1)
    parser.add_argument("-r", metavar='R', type=int,
                        dest="repeat", help="Number of repetitions",
                        default=1)
    parser.add_argument("--verbose", help="increase output verbosity",
                        action="store_true")
    args = parser.parse_args()
    global files
    global list_of_random_items_modified
    # Directory listing is kept globally for searching_pdb() to filter.
    files = listdir(args.myDirVariable)
    # One slot per candidate structure, filled later by using_adderror().
    list_of_random_items_modified = [None]*args.k_number_of_options
    return(args)
def rmsd_pymol(structure_1, structure_2):
    """Align two PDB structures in PyMOL and return their RMSD (float).

    Writes a throw-away PyMOL script, runs ``pymol -c`` on it, and parses
    the RMSD out of the ``Executive: RMSD = x.xxx (...)`` summary line.
    """
    with open("file_for_pymol.pml", "w") as file_for_pymol:
        file_for_pymol.write("""
    load {s1}
    load {s2}
    align {s3}, {s4}
    quit
    """.format(s1=structure_1, s2=structure_2,
               s3=os.path.splitext(structure_1)[0],
               s4=os.path.splitext(structure_2)[0]))
    # Bug fix: the original invoked pymol twice back to back, doubling runtime.
    # For META clusters, wrap the command with the appropriate "module add/rm".
    out_pymol = subprocess.check_output(" pymol -c file_for_pymol.pml | grep Executive:", shell=True)
    # Parse the number between '=' and '(' on the Executive: line (bytes output).
    rmsd = float(out_pymol[out_pymol.index(b'=')+1:out_pymol.index(b'(')-1])
    print('RMSD ', structure_1, ' and ', structure_2, ' = ', rmsd)
    return rmsd
def searching_pdb():
    """Append every *.pdb filename from the global ``files`` listing to the
    global ``pdb_files`` list and return the resulting count."""
    for line in files:
        line = line.rstrip()
        # Bug fix: '.pdb$' left the dot unescaped, so names like 'xpdb'
        # also matched; escape it to require a literal '.pdb' suffix.
        if re.search(r'\.pdb$', line):
            pdb_files.append(line)
    total_number_of_pdb_files = len(pdb_files)
    return(total_number_of_pdb_files)
def argument_processing(args, total_number_of_pdb_files):
    """Validate the parsed arguments against the available PDB files, then
    randomly draw the n-file ensemble and the k candidate structures.

    Populates the globals ``selected_files_for_ensamble`` and
    ``list_of_random_items``; exits the process on invalid combinations.
    """
    #print(args)
    print('Parametrs ')
    print('Total number of pdb files', total_number_of_pdb_files)
    # Cannot select more files than exist in the directory.
    if total_number_of_pdb_files < args.number_of_selected_files:
        print("Number od pdb files is ", total_number_of_pdb_files)
        sys.exit(0)
    # The k candidates must come from within the n selected files.
    if args.k_number_of_options > args.number_of_selected_files:
        print("Number of selected structure is only", args.number_of_selected_files)
        sys.exit(0)
    # Only q == 1 is supported so far.
    if args.mixing_koeficient != 1:
        print ("For q>1 is not implemented now \n")
        sys.exit(0)
    print('Files from directory', args.myDirVariable)
    print('The number of the selected files',
          args.number_of_selected_files)
    print('The number of selected options', args.k_number_of_options)
    print('All pdb.dat files \n', pdb_files)
    global selected_files_for_ensamble
    selected_files_for_ensamble = random.sample(pdb_files,
                                                args.number_of_selected_files)
    print('Randomly selected files: \n', selected_files_for_ensamble)
    global list_of_random_items
    list_of_random_items = random.sample(selected_files_for_ensamble,
                                         args.k_number_of_options)
    print('Randomly selected files: \n', list_of_random_items)
def using_adderror():
    """Add experimental noise (via adderror) to each of the k selected curves.

    Relies on the module-level ``args`` bound in the __main__ block and on
    the globals filled by argument_processing().  Returns a pair of
    newline-joined strings: (modified curve names, original curve names).
    """
    for i in range(args.k_number_of_options):
        list_of_random_items_modified[i] = adderror("exp.dat",list_of_random_items[i]+'.dat')
    str1 = ''.join(str(e)+"\n" for e in list_of_random_items_modified)
    str2 = ''.join(str(e)+"\n" for e in list_of_random_items)
    print(str1)
    print(str2)
    return(str1, str2)
def find_index(strings):
    """Record the position of each random candidate within the ensemble
    selection and write the noisy-curve listing to the fitter input file.

    ``strings`` is the (str1, str2) pair returned by using_adderror().
    """
    # Bug fix: value_of_index was used without ever being defined (NameError).
    value_of_index = {}
    for e in list_of_random_items:
        value_of_index[e] = selected_files_for_ensamble.index(e)
        print(selected_files_for_ensamble.index(e))
    with open("input_for_ensamble_fit", "w") as f:
        f.write(strings[0])
def ensamble_fit():
    """Run the external ensamble-fit binary once per candidate structure and
    collect the weight vector parsed from each run's 'result' file."""
    ensable_output=[None]*args.k_number_of_options
    # Bug fix: the loop bound was written 'k_number_of_options' without the
    # 'args.' prefix, which raised NameError at runtime.
    for i in range(args.k_number_of_options):
        command = "/storage/brno3-cerit/home/krab1k/saxs-ensamble-fit/core/ensamble-fit -L -p /storage/brno2/home/petrahrozkova/SAXS/mod -n " + str(args.number_of_selected_files) + " -m /storage/brno2/home/petrahrozkova/SAXS/" +list_of_random_items_modified[i]+".dat"
        subprocess.call(command,shell=True)
        ensable_output[i] = result_rmsd()
    return(ensable_output)
def result_rmsd():
    """Parse the fitter's 'result' file: skip the header line and return
    every comma-separated field after the fourth on the following line."""
    with open('result', 'r') as handle:
        handle.readline()            # discard the header line
        payload = handle.readline()
    return payload.split(',')[4:]
def pymol_processing(ensable_output):
    """Compute the weighted sum of RMSDs between each non-zero-weighted
    ensemble member and the single selected candidate structure.

    ``ensable_output`` holds one weight vector per run; only the first is
    used here (matches the k == 1 restriction enforced in __main__).
    """
    sum_rmsd = 0
    values_of_index_result = ensable_output[0]
    dictionary_index_and_structure = dict()
    # Map weight-vector positions back to structure file names.
    for i, j in enumerate(selected_files_for_ensamble):
        dictionary_index_and_structure[i] = j
    for i, j in enumerate(values_of_index_result):
        f = float(j)
        # Zero weight means the structure did not contribute; skip the
        # expensive PyMOL alignment in that case.
        if f != 0:
            computed_rmsd = rmsd_pymol(selected_files_for_ensamble[i],
                                       list_of_random_items[0])
            print('Adjusted rmsd ', f*computed_rmsd, '\n')
            sum_rmsd += f*computed_rmsd
    print('Sum of RMSD', sum_rmsd)
if __name__ == '__main__':
    args = argument()
    total_number_of_pdb_files = searching_pdb()
    # One full pipeline pass per requested repetition.
    for i in range(args.repeat):
        argument_processing(args, total_number_of_pdb_files)
        strings = using_adderror()
        #find_index(strings)
        # ensamble_output = ensamble-fit()
        # NOTE(review): the external fitter call is commented out; this stub
        # reuses whatever 'result' file is already on disk instead.
        ensamble_output=[None]*2
        ensamble_output[0] = result_rmsd()
        if args.k_number_of_options ==1:
            pymol_processing(ensamble_output)
        else:
            print("not implemented")
|
import collections
# Starting numbers for the "memory game" (Advent of Code 2020, day 15).
puzzle_input = (0,13,1,8,6,15)
# (iterate() argument list, expected result) pairs; the optional second
# argument is the iteration count (defaults to 2020).
test_inputs = [
    ([(0,3,6), 10], 0),
    ([(1,3,2)], 1),
    ([(2,1,3)], 10),
    ([(1,2,3)], 27),
    ([(2,3,1)], 78),
    ([(3,2,1)], 438),
    ([(3,1,2)], 1836),
    # Expensive Tests
    # ([(0,3,6), 30000000], 175594),
    # ([(1,3,2), 30000000], 2578),
    # ([(2,1,3), 30000000], 3544142),
    # ([(1,2,3), 30000000], 261214),
    # ([(2,3,1), 30000000], 6895259),
    # ([(3,2,1), 30000000], 18),
    # ([(3,1,2), 30000000], 362),
]
def iterate(input_, iterations=2020) -> int:
    """Play the Van Eck "memory game" and return the number spoken on turn
    ``iterations``.

    ``last_seen`` maps a number to the turn on which it was most recently
    spoken; a number never seen before yields 0 on the next turn.
    """
    last_seen = collections.defaultdict(int)
    spoken = None
    turn = 0
    # Seed the history with the starting numbers.
    for starter in input_:
        last_seen[spoken] = turn
        spoken = starter
        turn += 1
    # Each turn: speak 0 for a fresh number, otherwise the gap since it
    # was last spoken.
    while turn < iterations:
        previous_turn = last_seen[spoken]
        last_seen[spoken] = turn
        spoken = 0 if previous_turn == 0 else turn - previous_turn
        turn += 1
    return spoken
# Self-check iterate() against the known examples before solving the puzzle.
for _input, expected_output in test_inputs:
    print("Testing:", *_input, "...")
    actual_output = iterate(*_input)
    assert actual_output == expected_output, f"Expected: {expected_output}. Actual {actual_output}"
print("Part 1:", iterate(puzzle_input))
# Part 2 runs 30 million turns; expect a noticeable wait.
print("Part 2:", iterate(puzzle_input, 30000000))
|
import os
import time
import argparse
import tempfile
import PyPDF2
import datetime
from reportlab.pdfgen import canvas
parser = argparse.ArgumentParser("Add signatures to PDF files")
parser.add_argument("pdf", help="The pdf file to annotate")
parser.add_argument("signature", help="The signature file (png, jpg)")
parser.add_argument("--date", action='store_true')
parser.add_argument("--output", nargs='?',
help="Output file. Defaults to input filename plus '_signed'")
parser.add_argument("--coords", nargs='?', default='2x100x100x125x40',
help="Coordinates to place signature. Format: PAGExXxYxWIDTHxHEIGHT. 1x200x300x125x40 means page 1, 200 units horizontally from the bottom left, 300 units vertically from the bottom left, 125 units wide, 40 units tall. Pages count starts at 1 (1-based indexing). Units are pdf-standard units (1/72 inch).")
def _get_tmp_filename(suffix=".pdf"):
with tempfile.NamedTemporaryFile(suffix=".pdf") as fh:
return fh.name
def sign_pdf(args):
    """Stamp the signature image (and optionally today's date) onto one page
    of the input PDF and write the combined document.

    Renders the signature into a temporary single-page PDF with reportlab,
    merges that page onto the target page with PyPDF2, and copies every
    other page through unchanged.
    """
    #TODO: use a gui or something.... for now, just trial-and-error the coords
    # coords format is PAGExXxYxWIDTHxHEIGHT (see --coords help text).
    page_num, x1, y1, width, height = [int(a) for a in args.coords.split("x")]
    # Convert 1-based CLI page number to 0-based PyPDF2 index.
    page_num -= 1
    output_filename = args.output or "{}_signed{}".format(
        *os.path.splitext(args.pdf)
    )
    pdf_fh = open(args.pdf, 'rb')
    sig_tmp_fh = None
    pdf = PyPDF2.PdfFileReader(pdf_fh)
    writer = PyPDF2.PdfFileWriter()
    sig_tmp_filename = None
    for i in range(0, pdf.getNumPages()):
        page = pdf.getPage(i)
        if i == page_num:
            # Create PDF for signature
            sig_tmp_filename = _get_tmp_filename()
            c = canvas.Canvas(sig_tmp_filename, pagesize=page.cropBox)
            c.drawImage(args.signature, x1, y1, width, height, mask='auto')
            if args.date:
                c.drawString(x1 + width, y1, datetime.datetime.now().strftime("%Y-%m-%d"))
            c.showPage()
            c.save()
            # Merge PDF in to original page
            sig_tmp_fh = open(sig_tmp_filename, 'rb')
            sig_tmp_pdf = PyPDF2.PdfFileReader(sig_tmp_fh)
            sig_page = sig_tmp_pdf.getPage(0)
            sig_page.mediaBox = page.mediaBox
            page.mergePage(sig_page)
        writer.addPage(page)
    with open(output_filename, 'wb') as fh:
        writer.write(fh)
    # Input handles must stay open until writer.write() has finished, since
    # PyPDF2 reads page data lazily; close them only afterwards.
    for handle in [pdf_fh, sig_tmp_fh]:
        if handle:
            handle.close()
    if sig_tmp_filename:
        os.remove(sig_tmp_filename)
def main():
    """CLI entry point: parse arguments and sign the PDF."""
    sign_pdf(parser.parse_args())
if __name__ == "__main__":
    main()
|
import pexpect
import sys
import logging
import vt102
import os
import time
def termcheck(child, timeout=0):
    """Drain pending output from the spawned *child* process.

    Waits up to ``timeout`` seconds for EOF; on timeout, whatever output
    accumulated is left in ``child.before`` and returned.  The short sleep
    gives the child a moment to produce output before we read.
    """
    time.sleep(0.05)
    try:
        logging.debug("Waiting for EOF or timeout=%d"%timeout)
        child.expect(pexpect.EOF, timeout=timeout)
    except pexpect.exceptions.TIMEOUT:
        logging.debug("Hit timeout and have %d characters in child.before"%len(child.before))
    return child.before
def termkey(child, stream, screen, key, timeout=0):
    """Send one key to the child, feed its resulting output into the vt102
    stream, and log the updated virtual screen.

    ``timeout`` is forwarded to termcheck() — the original accepted the
    parameter but never used it.
    """
    logging.debug("Sending '%s' to child"%key)
    child.send(key)
    termcheck(child, timeout)
    logging.debug("Sending child.before text to vt102 stream")
    stream.process(child.before)
    logging.debug("vt102 screen dump")
    logging.debug(screen)
# Demo driver: spawn ./demo2.py in a pseudo-terminal, mirror its output into
# a vt102 virtual screen, and log the screen after each keystroke.
logging.basicConfig(filename='menu_demo.log',level=logging.DEBUG)
rows, columns = (50,120)
stream=vt102.stream()
screen=vt102.screen((int(rows), int(columns)))
screen.attach(stream)
logging.debug("Setup vt102 with %d %d"%(int(rows),int(columns)))
logging.debug("Starting demo2.py child process...")
# Child terminal dimensions must match the vt102 screen for correct rendering.
child = pexpect.spawn('./demo2.py', maxread=65536, dimensions=(int(rows),int(columns)))
s = termcheck(child)
logging.debug("Sending child.before (len=%d) text to vt102 stream"%len(child.before))
stream.process(child.before)
logging.debug("vt102 screen dump")
logging.debug(screen)
# Drive the menu: select item "a", then option "1".
termkey(child, stream, screen, "a")
termkey(child, stream, screen, "1")
logging.debug("Quiting...")
|
import json
from axe.http_exceptions import BadJSON
def get_request(request):
    """Identity extractor: hand the request object straight back."""
    return request
def get_query(request):
    """Extract the parsed query-string arguments from *request*."""
    return getattr(request, 'args')
def get_form(request):
    """Extract the submitted form data from *request*."""
    return getattr(request, 'form')
def get_body(request):
    """Extract the raw request body (bytes) from *request*."""
    return getattr(request, 'data')
def get_headers(request):
    """Extract the header mapping from *request*."""
    return getattr(request, 'headers')
def get_cookies(request):
    """Extract the cookie mapping from *request*."""
    return getattr(request, 'cookies')
def get_method(request):
    """Extract the HTTP method string from *request*."""
    return getattr(request, 'method')
def get_json(headers, body):
    """Decode *body* as JSON when the request declares a JSON content type.

    Returns None for non-JSON requests; raises BadJSON when a declared
    JSON payload fails to parse.
    """
    content_type = headers.get('Content-Type')
    # Accept parameterised values such as 'application/json; charset=utf-8',
    # which the previous strict equality check wrongly rejected.
    if content_type is None or not content_type.startswith('application/json'):
        return
    data = body.decode('utf8')
    try:
        return json.loads(data)
    except ValueError:
        raise BadJSON
|
def tryprint():
    """Return a fixed smoke-test message."""
    message = 'it will be oke'
    return message
|
import time
from nicfit.aio import Application
async def _main(args):
    """Demo entry point: echo the parsed args, pause, and exit with 0."""
    import asyncio
    print(args)
    print("Sleeping 2...")
    # Bug fix: time.sleep() would block the entire event loop inside a
    # coroutine; await the asyncio version instead.
    await asyncio.sleep(2)
    print("Sleeping 0...")
    return 0
def atexit():
    """Shutdown hook handed to Application; just announces that it ran."""
    message = "atexit"
    print(message)
# Build the nicfit Application around _main and run it; Application.run()
# exits the process itself, so control never reaches the assert below.
app = Application(_main, atexit=atexit)
app.arg_parser.add_argument("--example", help="Example cli")
app.run()
assert not"will not execute"
|
# -*- coding: utf-8 -*-
"""
Organization Registry - Controllers
"""
# web2py executes controllers in a global environment that provides request,
# settings, s3db, s3, db, T, response, current, etc.
module = request.controller
resourcename = request.function
# Refuse to serve the controller when the org module is disabled in settings.
if not settings.has_module(module):
    raise HTTP(404, body="Module disabled: %s" % module)
def index():
    """ Module's Home Page """
    # CMS-driven homepage; falls back to index_alt() when no content exists.
    return s3db.cms_index(module, alt_function="index_alt")
def index_alt():
    """
    Module homepage for non-Admin users when no CMS content found
    """
    # @ToDo: Move this to the Template (separate deployment_setting or else a customise for non-REST controllers)
    template = settings.get_template()
    if template == "SandyRelief":
        # Just redirect to the Facilities
        redirect(URL(f="facility"))
    else:
        # Just redirect to the list of Organisations
        redirect(URL(f="organisation"))
def group():
    """ RESTful CRUD controller """
    # Organisation groups; org_rheader supplies the tabbed record header.
    return s3_rest_controller(rheader = s3db.org_rheader)
def region():
    """ RESTful CRUD controller """
    # Plain S3 REST controller for org_region.
    return s3_rest_controller()
def sector():
    """ RESTful CRUD controller """
    # Pre-processor
    def prep(r):
        # Location Filter: restrict records to the user's location context.
        s3db.gis_location_filter(r)
        return True
    s3.prep = prep
    return s3_rest_controller()
def subsector():
    """ RESTful CRUD controller """
    # Plain S3 REST controller for org_subsector.
    return s3_rest_controller()
def site():
    """
    RESTful CRUD controller
    - used by S3SiteAutocompleteWidget
      which doesn't yet support filtering to just updateable sites
    - used by site_contact_person()
    - used by S3OptionsFilter (e.g. Asset Log)
    """
    # Pre-processor
    def prep(r):
        # Only JSON and the autocomplete/contact methods are served here;
        # everything else is rejected.
        if r.representation != "json" and \
           r.method not in ("search_ac", "search_address_ac", "site_contact_person"):
            return False
        # Location Filter
        s3db.gis_location_filter(r)
        return True
    s3.prep = prep
    return s3_rest_controller()
def sites_for_org():
    """
    Used to provide the list of Sites for an Organisation
    - used in User Registration

    Returns a JSON array of {site_id, name}; includes branch organisations
    when branch support is enabled.
    """
    try:
        org = request.args[0]
    # Narrowed from a bare 'except:': only a missing URL argument is expected.
    except IndexError:
        result = current.xml.json_message(False, 400, "No Org provided!")
    else:
        stable = s3db.org_site
        if settings.get_org_branches():
            # Find all branches for this Organisation
            btable = s3db.org_organisation_branch
            query = (btable.organisation_id == org) & \
                    (btable.deleted != True)
            rows = db(query).select(btable.branch_id)
            org_ids = [row.branch_id for row in rows] + [org]
            query = (stable.organisation_id.belongs(org_ids)) & \
                    (stable.deleted != True)
        else:
            query = (stable.organisation_id == org) & \
                    (stable.deleted != True)
        rows = db(query).select(stable.site_id,
                                stable.name,
                                orderby=stable.name)
        result = rows.json()
    finally:
        # NOTE: returning from finally deliberately swallows any exception
        # raised above and always delivers a JSON response.
        response.headers["Content-Type"] = "application/json"
        return result
def facility():
    """ RESTful CRUD controller """
    # Delegated to the model so multiple controllers share one implementation.
    return s3db.org_facility_controller()
def facility_type():
    """ RESTful CRUD controller """
    # Plain S3 REST controller for org_facility_type.
    return s3_rest_controller()
def office_type():
    """ RESTful CRUD controller """
    # Plain S3 REST controller for org_office_type.
    return s3_rest_controller()
def organisation_type():
    """ RESTful CRUD controller """
    # Plain S3 REST controller for org_organisation_type.
    return s3_rest_controller()
def organisation():
    """ RESTful CRUD controller """
    # Defined in the Model for use from Multiple Controllers for unified menus
    return s3db.org_organisation_controller()
def org_search():
    """
    Organisation REST controller
    - limited to just search_ac for use in Autocompletes
    - allows differential access permissions
    """
    # Reject every method except the autocomplete search.
    s3.prep = lambda r: r.method == "search_ac"
    return s3_rest_controller(module, "organisation")
def organisation_list_represent(l):
    """Render a list of organisation IDs as a readable string, truncating
    to the first 4 entries with an ', etc' suffix when longer."""
    organisation_represent = s3db.org_organisation_represent
    if l:
        max_length = 4
        if len(l) > max_length:
            return "%s, etc" % \
                   organisation_represent.multiple(l[:max_length])
        else:
            return organisation_represent.multiple(l)
    else:
        # NONE is the framework's standard empty-value representation.
        return NONE
def office():
    """ RESTful CRUD controller """
    # Defined in the Model for use from Multiple Controllers for unified menus
    return s3db.org_office_controller()
def person():
    """ Person controller for AddPersonWidget """
    def prep(r):
        if r.representation != "s3json":
            # Do not serve other representations here
            return False
        else:
            # The widget needs record IDs in the s3json output.
            current.xml.show_ids = True
            return True
    s3.prep = prep
    return s3_rest_controller("pr", "person")
def room():
    """ RESTful CRUD controller """
    # Plain S3 REST controller for org_room.
    return s3_rest_controller()
def mailing_list():
    """ RESTful CRUD controller

    Mailing lists are pr_group records with group_type 5; this controller
    locks the type, relabels the form, and builds the tabbed rheader.
    """
    tablename = "pr_group"
    table = s3db[tablename]
    # Only groups with a group_type of 5
    s3.filter = (table.group_type == 5)
    # The type is fixed for mailing lists; hide it from forms and views.
    table.group_type.writable = False
    table.group_type.readable = False
    table.name.label = T("Mailing List Name")
    s3.crud_strings[tablename] = s3.pr_mailing_list_crud_strings
    # define the list_fields
    list_fields = s3db.configure(tablename,
                                 list_fields = ["id",
                                                "name",
                                                "description",
                                                ])
    # Components
    _rheader = s3db.pr_rheader
    _tabs = [(T("Organization"), "organisation/"),
             (T("Mailing List Details"), None),
             ]
    # Member tab only makes sense on an individual record, not the list view.
    if len(request.args) > 0:
        _tabs.append((T("Members"), "group_membership"))
    # When viewed from an organisation record, switch to the org rheader.
    if "viewing" in request.vars:
        tablename, record_id = request.vars.viewing.rsplit(".", 1)
        if tablename == "org_organisation":
            table = s3db[tablename]
            _rheader = s3db.org_rheader
            _tabs = []
    s3db.add_components("pr_group", pr_group_membership="group_id")
    rheader = lambda r: _rheader(r, tabs = _tabs)
    return s3_rest_controller("pr",
                              "group",
                              rheader=rheader)
def donor():
    """ RESTful CRUD controller for org_donor with custom CRUD strings """
    tablename = "org_donor"
    table = s3db[tablename]
    # (A duplicated 'tablename = "org_donor"' assignment was removed here.)
    # ADD_DONOR is expected to be provided by the model/environment.
    s3.crud_strings[tablename] = Storage(
        label_create = ADD_DONOR,
        title_display = T("Donor Details"),
        title_list = T("Donors Report"),
        title_update = T("Edit Donor"),
        label_list_button = T("List Donors"),
        label_delete_button = T("Delete Donor"),
        msg_record_created = T("Donor added"),
        msg_record_modified = T("Donor updated"),
        msg_record_deleted = T("Donor deleted"),
        msg_list_empty = T("No Donors currently registered"))
    # Disable the inline "add" form on list views.
    s3db.configure(tablename, listadd=False)
    output = s3_rest_controller()
    return output
def resource():
    """ RESTful CRUD controller """
    def prep(r):
        if r.interactive:
            if r.method in ("create", "update"):
                # Context from a Profile page?"
                # When opened from a profile, fix location/organisation to the
                # profile's context and hide the fields from the form.
                table = r.table
                location_id = request.get_vars.get("(location)", None)
                if location_id:
                    field = table.location_id
                    field.default = location_id
                    field.readable = field.writable = False
                organisation_id = request.get_vars.get("(organisation)", None)
                if organisation_id:
                    field = table.organisation_id
                    field.default = organisation_id
                    field.readable = field.writable = False
        return True
    s3.prep = prep
    return s3_rest_controller()
def resource_type():
    """ RESTful CRUD controller """
    # Plain S3 REST controller for org_resource_type.
    return s3_rest_controller()
def service():
    """ RESTful CRUD controller """
    # Plain S3 REST controller for org_service.
    return s3_rest_controller()
def req_match():
    """ Match Requests for Sites """
    # Delegated to the req module's shared implementation.
    return s3db.req_match()
def incoming():
    """
    Incoming Shipments for Sites
    @unused
    """
    # NOTE(review): inv_incoming is resolved from the web2py environment;
    # flagged @unused upstream.
    return inv_incoming()
def facility_geojson():
    """
    Create GeoJSON[P] of Facilities for use by a high-traffic website
    - controller just for testing
    - function normally run on a schedule
    """
    # Output is written by the model function itself; nothing is returned here.
    s3db.org_facility_geojson()
|
from wsgidav.dav_provider import DAVCollection, DAVNonCollection
from wsgidav.dav_error import DAVError, HTTP_FORBIDDEN
from wsgidav import util
from wsgidav.addons.tracim import role, MyFileStream
from time import mktime
from datetime import datetime
from os.path import normpath, dirname, basename
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
class Root(DAVCollection):
    """WebDAV root collection (Python 2 / wsgidav): lists every Tracim
    workspace that the authenticated user has at least READER rights on."""
    def __init__(self, path, environ):
        super(Root, self).__init__(path, environ)
    def __repr__(self):
        return 'Root folder'
    def getCreationDate(self):
        # The root is virtual; report "now" rather than a stored timestamp.
        return mktime(datetime.now().timetuple())
    def getDisplayName(self):
        return 'Tracim - Home'
    def getLastModified(self):
        return mktime(datetime.now().timetuple())
    def getMemberNames(self):
        return self.provider.get_all_workspaces(only_name=True)
    def getMember(self, workspace_name):
        workspace = self.provider.get_workspace({'label': workspace_name})
        # Hide (rather than forbid) workspaces the user may not read.
        if not self.provider.has_right(
            self.environ["http_authenticator.username"],
            workspace.workspace_id,
            role["READER"]
        ):
            return None
        return Workspace(self.path + workspace.label, self.environ, workspace)
    def createEmptyResource(self, name):
        # Files cannot live at the root; only workspaces do.
        raise DAVError(HTTP_FORBIDDEN)
    def createCollection(self, name):
        # Workspace creation is not exposed through WebDAV at the root.
        raise DAVError(HTTP_FORBIDDEN)
    def getMemberList(self):
        # Filter out members getMember() refused (permission-denied -> None).
        memberlist = []
        for name in self.getMemberNames():
            member = self.getMember(name)
            if member is not None:
                memberlist.append(member)
        return memberlist
class Workspace(DAVCollection):
    """WebDAV collection mapped onto a Tracim workspace.

    Children are top-level items of the workspace; all mutating operations
    are permission-checked through provider.has_right().
    """
    def __init__(self, path, environ, workspace):
        super(Workspace, self).__init__(path, environ)
        self.workspace = workspace
    def __repr__(self):
        return "Workspace: %s" % self.workspace.label
    def getCreationDate(self):
        return mktime(self.workspace.created.timetuple())
    def getDisplayName(self):
        return self.workspace.label
    def getLastModified(self):
        return mktime(self.workspace.updated.timetuple())
    def getMemberNames(self):
        # Member "names" are item ids; getMember() resolves them.
        return self.provider.get_workspace_children_id(self.workspace)
    def getMember(self, item_id):
        # child_revision_id None selects the current revision of the item.
        item = self.provider.get_item({'id': item_id, 'child_revision_id': None})
        if not self.provider.has_right(
            self.environ["http_authenticator.username"],
            item.workspace_id,
            role["READER"]
        ):
            return None
        return Folder(self.path + item.item_name, self.environ, item)
    def createEmptyResource(self, name):
        # Files may not be created directly under a workspace.
        raise DAVError(HTTP_FORBIDDEN)
    def createCollection(self, name):
        assert "/" not in name
        # Creating folders requires CONTENT_MANAGER rights.
        if not self.provider.has_right(
            self.environ["http_authenticator.username"],
            self.workspace.workspace_id,
            role["CONTENT_MANAGER"]
        ):
            raise DAVError(HTTP_FORBIDDEN)
        item = self.provider.add_item(
            item_name=name,
            item_type="FOLDER",
            workspace_id=self.workspace.workspace_id
        )
        return Folder(self.path + name, self.environ, item)
    def delete(self):
        # Deleting a whole workspace requires WORKSPACE_MANAGER rights.
        if not self.provider.has_right(
            self.environ["http_authenticator.username"],
            self.workspace.workspace_id,
            role["WORKSPACE_MANAGER"]
        ):
            raise DAVError(HTTP_FORBIDDEN)
        self.provider.delete_workspace(self.workspace)
        self.removeAllLocks(True)
    def copyMoveSingle(self, destpath, ismove):
        # Move renames the workspace; copy creates a new empty one.
        if ismove:
            self.provider.set_workspace_label(self.workspace, basename(normpath(destpath)))
        else:
            self.provider.add_workspace(basename(normpath(destpath)))
    def supportRecursiveMove(self, destpath):
        return True
    def moveRecursive(self, destpath):
        # Workspaces can only be renamed in place (destination must stay at '/').
        if not self.provider.has_right(
            self.environ["http_authenticator.username"],
            self.workspace.workspace_id,
            role["WORKSPACE_MANAGER"]
        ) or dirname(normpath(destpath)) != '/':
            raise DAVError(HTTP_FORBIDDEN)
        self.provider.set_workspace_label(self.workspace, basename(normpath(destpath)))
    def setLastModified(self, destpath, timestamp, dryrun):
        # Timestamps are managed by Tracim; reject client-side changes.
        return False
    def getMemberList(self):
        memberlist = []
        for name in self.getMemberNames():
            member = self.getMember(name)
            if member is not None:
                memberlist.append(member)
        return memberlist
class Folder(DAVCollection):
    """WebDAV collection mapped onto a Tracim FOLDER item.

    Member access is permission-checked through provider.has_right(); a
    virtual '.history' sub-folder exposing revisions is appended to
    non-empty listings (unless the listing is for a copy/move).
    """
    def __init__(self, path, environ, item):
        super(Folder, self).__init__(path, environ)
        self.item = item
    def __repr__(self):
        return "Folder: %s" % self.item.item_name
    def getCreationDate(self):
        return mktime(self.item.created.timetuple())
    def getDisplayName(self):
        return self.item.item_name
    def getLastModified(self):
        return mktime(self.item.updated.timetuple())
    def getMemberNames(self):
        # Member "names" are item ids; getMember() resolves them.
        return self.provider.get_item_children(self.item.id)
    def getMember(self, item_id):
        # Non-readers see children as missing rather than forbidden.
        if not self.provider.has_right(
            self.environ["http_authenticator.username"],
            self.item.workspace_id,
            role["READER"]
        ):
            return None
        item = self.provider.get_item({'id': item_id, 'child_revision_id': None})
        return self.provider.getResourceInst(self.path + item.item_name, self.environ)
    def createEmptyResource(self, name):
        assert "/" not in name
        # File creation requires CONTRIBUTOR rights.
        if not self.provider.has_right(
            self.environ["http_authenticator.username"],
            self.item.workspace_id,
            role["CONTRIBUTOR"]
        ):
            raise DAVError(HTTP_FORBIDDEN)
        item = self.provider.add_item(
            item_name=name,
            item_type="FILE",
            workspace_id=self.item.workspace_id,
            parent_id=self.item.id
        )
        return File(self.path + name, self.environ, item)
    def createCollection(self, name):
        assert "/" not in name
        # Sub-folder creation requires CONTENT_MANAGER rights.
        if not self.provider.has_right(
            self.environ["http_authenticator.username"],
            self.item.workspace_id,
            role["CONTENT_MANAGER"]
        ):
            raise DAVError(HTTP_FORBIDDEN)
        item = self.provider.add_item(
            item_name=name,
            item_type="FOLDER",
            workspace_id=self.item.workspace_id,
            parent_id=self.item.id
        )
        return Folder(self.path + name, self.environ, item)
    def delete(self):
        if not self.provider.has_right(
            self.environ["http_authenticator.username"],
            self.item.workspace_id,
            role["CONTENT_MANAGER"]
        ):
            raise DAVError(HTTP_FORBIDDEN)
        self.provider.delete_item(self.item)
        self.removeAllLocks(True)
    def copyMoveSingle(self, destpath, ismove):
        # Folders may not be copied/moved to the root or workspace level.
        if not self.provider.has_right(
            self.environ["http_authenticator.username"],
            self.item.workspace_id,
            role["CONTENT_MANAGER"]
        ) or dirname(normpath(destpath)) == '/':
            raise DAVError(HTTP_FORBIDDEN)
        if ismove:
            self.provider.move_item(self.item, destpath)
        else:
            self.provider.copy_item(self.item, destpath)
    def supportRecursiveMove(self, destpath):
        return True
    def moveRecursive(self, destpath):
        self.copyMoveSingle(destpath, True)
    def setLastModified(self, destpath, timestamp, dryrun):
        # Timestamps are managed by Tracim; reject client-side changes.
        return False
    def getMemberList(self, copyOrMove=False):
        # (A leftover debug 'print' statement was removed here.)
        memberlist = []
        for name in self.getMemberNames():
            member = self.getMember(name)
            if member is not None:
                memberlist.append(member)
        # The virtual .history folder is hidden from copy/move traversals so
        # revisions are not duplicated.
        if memberlist != [] and not copyOrMove:
            memberlist.append(HistoryFolder(self.path + ".history", self.environ, self.item))
        return memberlist
    def getDescendants(self, collections=True, resources=True,
                       depthFirst=False, depth="infinity", addSelf=False, copyOrMove=False):
        """Depth-aware traversal mirroring wsgidav's default, but threading
        the copyOrMove flag through so .history folders stay hidden."""
        assert depth in ("0", "1", "infinity")
        res = []
        if addSelf and not depthFirst:
            res.append(self)
        if depth != "0" and self.isCollection:
            # (A dead debug re-computation of the member list was removed here.)
            for child in self.getMemberList(copyOrMove):
                want = (collections and child.isCollection) or (resources and not child.isCollection)
                if want and not depthFirst:
                    res.append(child)
                if child.isCollection and depth == "infinity":
                    res.extend(child.getDescendants(collections, resources, depthFirst, depth, addSelf=False, copyOrMove=copyOrMove))
                if want and depthFirst:
                    res.append(child)
        if addSelf and depthFirst:
            res.append(self)
        return res
class HistoryFolder(Folder):
    """Virtual '.history' collection inside a Folder: exposes one
    HistoryFileFolder per non-folder child item."""
    def __init__(self, path, environ, item):
        super(HistoryFolder, self).__init__(path, environ, item)
    def __repr__(self):
        return "Folder history of : %s" % self.item.item_name
    def getCreationDate(self):
        # Virtual folder: report "now" rather than a stored timestamp.
        return mktime(datetime.now().timetuple())
    def getDisplayName(self):
        return '.history'
    def getLastModified(self):
        return mktime(datetime.now().timetuple())
    def getMember(self, item_id):
        if not self.provider.has_right(
            self.environ["http_authenticator.username"],
            self.item.workspace_id,
            role["READER"]
        ):
            return None
        item = self.provider.get_item({'id': item_id, 'child_revision_id': None})
        # Folders have no revision history; only files get an entry.
        if item.item_type == 'FOLDER':
            return None
        return HistoryFileFolder(self.path + item.item_name, self.environ, item)
    def createEmptyResource(self, name):
        # The history view is read-only.
        raise DAVError(HTTP_FORBIDDEN)
    def createCollection(self, name):
        raise DAVError(HTTP_FORBIDDEN)
    def handleDelete(self):
        # Pretend success so clients deleting a parent tree don't error out.
        return True
    def handleCopy(self, destPath, depthInfinity):
        return True
    def handleMove(self, destPath):
        return True
    def setLastModified(self, destpath, timestamp, dryrun):
        return False
    def getMemberList(self, copyOrMove=False):
        # No .history is appended here, unlike the parent class.
        memberlist = []
        for name in self.getMemberNames():
            member = self.getMember(name)
            if member is not None:
                memberlist.append(member)
        return memberlist
class HistoryFileFolder(HistoryFolder):
    """Virtual per-file collection under '.history': one member per stored
    revision of the file, named '<revision-id>-<name>'."""
    def __init__(self, path, environ, item):
        super(HistoryFileFolder, self).__init__(path, environ, item)
    def __repr__(self):
        return "File folder history of : %s" % self.item.item_name
    def getCreationDate(self):
        # Virtual folder: report "now" rather than a stored timestamp.
        return mktime(datetime.now().timetuple())
    def getDisplayName(self):
        return self.item.item_name
    def createCollection(self, name):
        raise DAVError(HTTP_FORBIDDEN)
    def getLastModified(self):
        return mktime(datetime.now().timetuple())
    def getMemberNames(self):
        return self.provider.get_all_revisions_from_item(self.item, only_id=True)
    def getMember(self, item_id):
        if not self.provider.has_right(
            self.environ["http_authenticator.username"],
            self.item.workspace_id,
            role["READER"]):
            return None
        item = self.provider.get_item({'id': item_id})
        # FILE revisions are served raw; other item types get an HTML wrapper.
        if item.item_type in ["FILE"]:
            return HistoryFile(self.path + str(item.id) + '-' + item.item_name , self.environ, item)
        else:
            return HistoryOtherFile(self.path + str(item.id) + '-' + item.item_name, self.environ, item)
class File(DAVNonCollection):
    """WebDAV resource mapped onto a Tracim FILE item.

    Reads are served from item_content held in memory; writes go through a
    MyFileStream which commits to the provider.
    """
    def __init__(self, path, environ, item):
        super(File, self).__init__(path, environ)
        self.item = item
        # Write sink handed out by beginWrite().
        self.filestream = MyFileStream(self.provider, self.item)
    def __repr__(self):
        return "File: %s" % self.item.item_name
    def getContentLength(self):
        return len(self.item.item_content)
    def getContentType(self):
        return util.guessMimeType(self.item.item_name)
    def getCreationDate(self):
        return mktime(self.item.created.timetuple())
    def getDisplayName(self):
        return self.item.item_name
    def getLastModified(self):
        return mktime(self.item.updated.timetuple())
    def getContent(self):
        # Wrap the stored content in a rewound in-memory stream.
        filestream = StringIO()
        filestream.write(self.item.item_content)
        filestream.seek(0)
        return filestream
    def beginWrite(self, contentType=None):
        return self.filestream
    def delete(self):
        if not self.provider.has_right(
            self.environ["http_authenticator.username"],
            self.item.workspace_id,
            role["CONTENT_MANAGER"]
        ):
            raise DAVError(HTTP_FORBIDDEN)
        self.provider.delete_item(self.item)
        self.removeAllLocks(True)
    def copyMoveSingle(self, destpath, ismove):
        # Needs CONTRIBUTOR rights at the destination workspace and READER at
        # the source; files may not land at root or directly under a workspace.
        if not self.provider.has_right(
            self.environ["http_authenticator.username"],
            self.provider.get_workspace_id_from_path(destpath),
            role["CONTRIBUTOR"]
        ) or not self.provider.has_right(
            self.environ["http_authenticator.username"],
            self.item.workspace_id,
            role["READER"]
        ) or dirname(normpath(destpath)) == '/' \
            or dirname(dirname(normpath(destpath))) == '/':
            raise DAVError(HTTP_FORBIDDEN)
        # A move carries the full revision history; a copy starts fresh.
        if ismove:
            self.provider.move_all_revisions(self.item, destpath)
        else:
            self.provider.copy_item(self.item, destpath)
    def supportRecursiveMove(self, dest):
        return True
    def moveRecursive(self, destpath):
        self.copyMoveSingle(destpath, True)
    def setLastModified(self, dest, timestamp, dryrun):
        # Timestamps are managed by Tracim; reject client-side changes.
        return False
class HistoryFile(File):
    """Read-only view of one stored revision of a FILE item; name is
    prefixed with the revision id to keep siblings unique."""
    def __init__(self, path, environ, item):
        super(HistoryFile, self).__init__(path, environ, item)
    def __repr__(self):
        return "File history: %s-%s" % (self.item.item_name, self.item.id)
    def getDisplayName(self):
        return str(self.item.id) + '-' + self.item.item_name
    def beginWrite(self, contentType=None):
        # Revisions are immutable.
        raise DAVError(HTTP_FORBIDDEN)
    def delete(self):
        raise DAVError(HTTP_FORBIDDEN)
    def handleDelete(self):
        # Pretend success so clients deleting a parent tree don't error out.
        return True
    def handleCopy(self, destPath, depthInfinity):
        return True
    def handleMove(self, destPath):
        return True
    def copyMoveSingle(self, destpath, ismove):
        raise DAVError(HTTP_FORBIDDEN)
class OtherFile(File):
    """Read-only HTML rendering of a non-FILE Tracim item (e.g. a page),
    wrapped with the Tracim stylesheet by design()."""
    def __init__(self, path, environ, item):
        super(OtherFile, self).__init__(path, environ, item)
        self.content = self.design(self.item.item_content)
    def __repr__(self):
        return "File: %s" % self.item.item_name
    def getContentLength(self):
        return len(self.content)
    def getContentType(self):
        # Always HTML, regardless of the item name.
        return 'text/html'
    def getContent(self):
        filestream = StringIO()
        filestream.write(self.content)
        filestream.seek(0)
        return filestream
    def design(self, content):
        """Wrap *content* in a minimal HTML page embedding the Tracim CSS."""
        # Use a context manager so the stylesheet handle is closed even if
        # read() raises (the original left it open in that case).
        with open('wsgidav/addons/tracim/style.css', 'r') as f:
            style = f.read()
        file = '''
        <html>
        <head>
            <title>Hey</title>
            <style>%s</style>
        </head>
        <body>
        <div>
            %s
        </div>
        </body>
        </html>
        ''' % (style, content)
        return file
class HistoryOtherFile(OtherFile):
    """Read-only HTML view of one stored revision of a non-FILE item;
    name is prefixed with the revision id."""
    def __init__(self, path, environ, item):
        super(HistoryOtherFile, self).__init__(path, environ, item)
        self.content = self.design(self.item.item_content)
    def __repr__(self):
        return "File history: %s-%s" % (self.item.item_name, self.item.id)
    def getDisplayName(self):
        return str(self.item.id) + '-' + self.item.item_name
    def beginWrite(self, contentType=None):
        # Revisions are immutable.
        raise DAVError(HTTP_FORBIDDEN)
    def delete(self):
        raise DAVError(HTTP_FORBIDDEN)
    def handleDelete(self):
        # Pretend success so clients deleting a parent tree don't error out.
        return True
    def handleCopy(self, destPath, depthInfinity):
        return True
    def handleMove(self, destPath):
        return True
    def copyMoveSingle(self, destpath, ismove):
        raise DAVError(HTTP_FORBIDDEN)
|
from ez_setup import use_setuptools
use_setuptools()
import os
import sys
from fnmatch import fnmatchcase
from distutils.util import convert_path
from propane_distribution import cmdclassdict
from setuptools import setup, find_packages
from engineer import version
# Distribution name.
PROJECT = 'engineer'

# Default patterns excluded from package_data collection (files, then dirs).
standard_exclude = ('*.py', '*.pyc', '*~', '.*', '*.bak', '*.swp*')
standard_exclude_directories = ('.*', 'CVS', '_darcs', './build',
                                './dist', 'EGG-INFO', '*.egg-info')
def find_package_data(
        where='.', package='',
        exclude=standard_exclude,
        exclude_directories=standard_exclude_directories,
        only_in_packages=True,
        show_ignored=False):
    """
    Return a dictionary suitable for use in ``package_data``
    in a distutils ``setup.py`` file.

    The dictionary looks like::

        {'package': [files]}

    Where ``files`` is a list of all the files in that package that
    don't match anything in ``exclude``.

    If ``only_in_packages`` is true, then top-level directories that
    are not packages won't be included (but directories under packages
    will).

    Directories matching any pattern in ``exclude_directories`` will
    be ignored; by default directories with leading ``.``, ``CVS``,
    and ``_darcs`` will be ignored.

    If ``show_ignored`` is true, then all the files that aren't
    included in package data are shown on stderr (for debugging
    purposes).

    Note patterns use wildcards, or can be exact paths (including
    leading ``./``), and all searching is case-insensitive.

    This function is by Ian Bicking.
    """
    out = {}
    # Breadth-first walk; each entry is
    # (directory, data-path prefix, owning package, in-package-only flag).
    stack = [(convert_path(where), '', package, only_in_packages)]
    while stack:
        where, prefix, package, only_in_packages = stack.pop(0)
        for name in os.listdir(where):
            fn = os.path.join(where, name)
            if os.path.isdir(fn):
                if _ignored_by_patterns(name, fn, exclude_directories,
                                        show_ignored, 'Directory'):
                    continue
                if os.path.isfile(os.path.join(fn, '__init__.py')):
                    # Subdirectory is a package of its own; restart the
                    # data-path prefix under the new package name.
                    if not package:
                        new_package = name
                    else:
                        new_package = package + '.' + name
                    stack.append((fn, '', new_package, False))
                else:
                    # Plain data directory: keep extending the prefix.
                    stack.append((fn, prefix + name + '/', package,
                                  only_in_packages))
            elif package or not only_in_packages:
                # is a file
                if _ignored_by_patterns(name, fn, exclude,
                                        show_ignored, 'File'):
                    continue
                out.setdefault(package, []).append(prefix + name)
    return out


def _ignored_by_patterns(name, fn, patterns, show_ignored, kind):
    """Return True when *name* or *fn* matches one of *patterns*.

    Shared by the directory and file exclusion checks in
    ``find_package_data``.  Fix: uses ``sys.stderr.write`` instead of the
    Python-2-only ``print >> sys.stderr`` statement, so the module parses
    on both Python 2 and Python 3 while producing the same message.
    """
    for pattern in patterns:
        if fnmatchcase(name, pattern) or fn.lower() == pattern.lower():
            if show_ignored:
                sys.stderr.write(
                    "%s %s ignored by pattern %s\n" % (kind, fn, pattern))
            return True
    return False
def get_install_requirements(requirements_file='requirements.txt'):
    """Parse a pip requirements file into a list of requirement strings.

    Blank lines, comments (``#``) and pip directives (``-e``, ``-r``) are
    skipped.

    Fixes: the original stripped newlines with ``line[:-1]``, which silently
    dropped the last character of the final line when the file had no
    trailing newline; the ``line is None`` test was dead code; the local
    ``file`` shadowed the builtin.

    :param requirements_file: path to the requirements file to read.
    :return: list of requirement specifier strings.
    """
    requirements = []
    with open(requirements_file) as req_file:
        for raw_line in req_file.read().splitlines():
            line = raw_line.strip()
            if not line or line.startswith(('#', '-e', '-r')):
                continue
            requirements.append(line)
    return requirements
def get_readme():
    """Return the full text of README.md (used as the long description)."""
    with open('README.md') as readme:
        return readme.read()
# Package definition; relies on the helpers and constants defined above
# (PROJECT, find_package_data, get_install_requirements, get_readme).
setup(
    name=PROJECT,
    version=version.string,
    author='Tyler Butler',
    author_email='tyler@tylerbutler.com',
    platforms='any',
    packages=find_packages(),
    entry_points={
        'console_scripts': [
            'engineer=engineer.engine:cmdline',
            # '[dev]' requires the 'dev' extra (see extras_require below).
            'engineer_dev=engineer.devtools:main [dev]'
        ],
    },
    url='http://github.com/tylerbutler/engineer',
    license='MIT',
    description='A static website generator.',
    long_description=get_readme(),
    install_requires=get_install_requirements(),
    tests_require=get_install_requirements('requirements_tests.txt'),
    extras_require={
        'dev': ['argh', 'clint']
    },
    # cmdclassdict comes from propane_distribution (imported above).
    cmdclass=cmdclassdict,
    include_package_data=True,
    package_data=find_package_data(PROJECT,
                                   package=PROJECT,
                                   only_in_packages=False),
    # Setting to False doesn't create an egg - easier to debug and hack on
    zip_safe=True,
)
|
from ._test_base import TestBase
__all__ = ['TestBase']
|
import collections
import threading
import logging
import Queue
from ant.base.ant import Ant
from ant.base.message import Message
from ant.easy.channel import Channel
from ant.easy.filter import wait_for_event, wait_for_response, wait_for_special
_logger = logging.getLogger("garmin.ant.easy.node")
class Node():
    """High-level ANT node.

    Owns the underlying ``Ant`` device, the set of channels, and a worker
    thread that shuttles device callbacks into thread-safe queues which the
    ``wait_for_*`` helpers and the ``_main`` dispatch loop consume.
    """

    def __init__(self):
        # Responses and events are deques guarded by condition variables;
        # broadcast/burst payloads flow through a blocking Queue.
        self._responses_cond = threading.Condition()
        self._responses = collections.deque()
        self._event_cond = threading.Condition()
        self._events = collections.deque()
        self._datas = Queue.Queue()
        self.channels = {}
        self.ant = Ant()
        self._running = True
        self._worker_thread = threading.Thread(target=self._worker, name="ant.easy")
        self._worker_thread.start()

    def new_channel(self, ctype):
        """Create, register and assign a channel of the given type.

        NOTE(review): the channel number is hard-coded to 0, so a second
        call overwrites the first channel — confirm single-channel use is
        intended before relying on multiple channels.
        """
        channel = Channel(0, self, self.ant)
        self.channels[0] = channel
        channel._assign(ctype, 0x00)
        return channel

    def request_message(self, messageId):
        """Request *messageId* from the device and wait for the reply."""
        _logger.debug("requesting message %#02x", messageId)
        self.ant.request_message(0, messageId)
        _logger.debug("done requesting message %#02x", messageId)
        return self.wait_for_special(messageId)

    def set_network_key(self, network, key):
        """Set the network key and wait for the device to acknowledge it."""
        self.ant.set_network_key(network, key)
        return self.wait_for_response(Message.ID.SET_NETWORK_KEY)

    def wait_for_event(self, ok_codes):
        return wait_for_event(ok_codes, self._events, self._event_cond)

    def wait_for_response(self, event_id):
        return wait_for_response(event_id, self._responses, self._responses_cond)

    def wait_for_special(self, event_id):
        return wait_for_special(event_id, self._responses, self._responses_cond)

    def _worker_response(self, channel, event, data):
        # Runs on the device thread: enqueue the response and wake waiters.
        self._responses_cond.acquire()
        self._responses.append((channel, event, data))
        self._responses_cond.notify()
        self._responses_cond.release()

    def _worker_event(self, channel, event, data):
        # Data-carrying events go to the data queue; all other events are
        # queued for the wait_for_event machinery.
        if event == Message.Code.EVENT_RX_BURST_PACKET:
            self._datas.put(('burst', channel, data))
        elif event == Message.Code.EVENT_RX_BROADCAST:
            self._datas.put(('broadcast', channel, data))
        else:
            self._event_cond.acquire()
            self._events.append((channel, event, data))
            self._event_cond.notify()
            self._event_cond.release()

    def _worker(self):
        # Wire our callbacks into the device and hand control to its loop.
        self.ant.response_function = self._worker_response
        self.ant.channel_event_function = self._worker_event

        # TODO: check capabilities
        self.ant.start()

    def _main(self):
        # Dispatch loop: pull data packets and hand them to their channel.
        while self._running:
            try:
                (data_type, channel, data) = self._datas.get(True, 1.0)
                self._datas.task_done()
                if data_type == 'broadcast':
                    self.channels[channel].on_broadcast_data(data)
                elif data_type == 'burst':
                    self.channels[channel].on_burst_data(data)
                else:
                    _logger.warning("Unknown data type '%s': %r", data_type, data)
            except Queue.Empty:
                # Timeout just lets us re-check self._running periodically.
                # (Fix: the bound-but-unused exception variable was removed.)
                pass

    def start(self):
        """Run the dispatch loop on the calling thread until stop() is called."""
        self._main()

    def stop(self):
        """Stop the dispatch loop and shut the device down (idempotent)."""
        if self._running:
            # Fix: corrected "Stoping" typo in the log message.
            _logger.debug("Stopping ant.easy")
            self._running = False
            self.ant.stop()
            self._worker_thread.join()
|
import os
from ConfigParser import ConfigParser
from amlib import argp
tmp_conf = ConfigParser()

# ampush.conf lives one directory above the package directory containing
# this module.  Fix: use os.path.dirname/os.path.join instead of splitting
# and re-joining on '/', which was not portable and broke on any path
# os.path normalizes differently.
tmp_path = os.path.dirname(os.path.abspath(__file__))  # /base/lib/here
conf_path = os.path.dirname(tmp_path)                  # /base/lib
tmp_conf.read(os.path.join(conf_path, 'ampush.conf'))

c = {}
c.update(tmp_conf.items('default'))

# Select the AM container for the mode given on the command line, falling
# back to the default container when no mode was supplied.
if argp.a['mode'] is not None:
    try:
        container_conf_key = 'am_container_' + argp.a['mode']
        c['am_container'] = c[container_conf_key]
    except KeyError:
        log_msg = 'Terminating. No such parameter in ampush.conf: ' + \
            container_conf_key
        raise Exception(log_msg)
else:
    c['am_container'] = c['am_container_default']

# Same pattern for the flat-file map directory, keyed by --source.
if argp.a['source'] is not None:
    try:
        ff_map_dir_conf_key = 'flat_file_map_dir_' + argp.a['source']
        c['flat_file_map_dir'] = c[ff_map_dir_conf_key]
    except KeyError:
        log_msg = 'Terminating. No such parameter in ampush.conf: ' + \
            ff_map_dir_conf_key
        raise Exception(log_msg)
else:
    c['flat_file_map_dir'] = c['flat_file_map_dir_default']
|
from __future__ import unicode_literals
import operator
import pytest
from marrow.mongo import Filter
from marrow.schema.compat import odict, py3
@pytest.fixture
def empty_ops(request):
    # A Filter with no operations at all.
    return Filter()
@pytest.fixture
def single_ops(request):
    # A Filter holding exactly one operation: roll == 27.
    return Filter({'roll': 27})
def test_ops_iteration(single_ops):
    # Iterating a Filter yields its field names.
    assert list(iter(single_ops)) == ['roll']
class TestOpsMapping(object):
    """Exercise Filter's dict-like (MutableMapping-style) behaviour."""

    def test_getitem(self, empty_ops, single_ops):
        with pytest.raises(KeyError):
            empty_ops['roll']

        assert single_ops['roll'] == 27

    def test_setitem(self, empty_ops):
        assert repr(empty_ops) == "Filter([])"
        empty_ops['meaning'] = 42
        # repr of text keys differs between Python 3 ('') and 2 (u'').
        if py3:
            assert repr(empty_ops) == "Filter([('meaning', 42)])"
        else:
            assert repr(empty_ops) == "Filter([(u'meaning', 42)])"

    def test_delitem(self, empty_ops, single_ops):
        with pytest.raises(KeyError):
            del empty_ops['roll']

        if py3:
            assert repr(single_ops) == "Filter([('roll', 27)])"
        else:
            assert repr(single_ops) == "Filter([(u'roll', 27)])"

        del single_ops['roll']
        assert repr(single_ops) == "Filter([])"

    def test_length(self, empty_ops, single_ops):
        assert len(empty_ops) == 0
        assert len(single_ops) == 1

    def test_keys(self, empty_ops, single_ops):
        assert list(empty_ops.keys()) == []
        assert list(single_ops.keys()) == ['roll']

    def test_items(self, empty_ops, single_ops):
        assert list(empty_ops.items()) == []
        assert list(single_ops.items()) == [('roll', 27)]

    def test_values(self, empty_ops, single_ops):
        assert list(empty_ops.values()) == []
        assert list(single_ops.values()) == [27]

    def test_contains(self, single_ops):
        assert 'foo' not in single_ops
        assert 'roll' in single_ops

    def test_equality_inequality(self, empty_ops, single_ops):
        # Filters compare equal to plain dicts with the same content.
        assert empty_ops == {}
        assert empty_ops != {'roll': 27}
        assert single_ops != {}
        assert single_ops == {'roll': 27}

    def test_get(self, single_ops):
        assert single_ops.get('foo') is None
        assert single_ops.get('foo', 42) == 42
        assert single_ops.get('roll') == 27

    def test_clear(self, single_ops):
        assert len(single_ops.operations) == 1
        single_ops.clear()
        assert len(single_ops.operations) == 0

    def test_pop(self, single_ops):
        assert len(single_ops.operations) == 1

        with pytest.raises(KeyError):
            single_ops.pop('foo')

        # pop with a default neither raises nor removes anything.
        assert single_ops.pop('foo', 42) == 42
        assert len(single_ops.operations) == 1

        assert single_ops.pop('roll') == 27
        assert len(single_ops.operations) == 0

    def test_popitem(self, single_ops):
        assert len(single_ops.operations) == 1
        assert single_ops.popitem() == ('roll', 27)
        assert len(single_ops.operations) == 0

        with pytest.raises(KeyError):
            single_ops.popitem()

    def test_update(self, empty_ops, single_ops):
        assert len(empty_ops.operations) == 0
        empty_ops.update(name="Bob Dole")
        assert len(empty_ops.operations) == 1
        if py3:
            assert repr(empty_ops) == "Filter([('name', 'Bob Dole')])"
        else:
            assert repr(empty_ops) == "Filter([('name', u'Bob Dole')])"

        assert len(single_ops.operations) == 1
        if py3:
            assert repr(single_ops) == "Filter([('roll', 27)])"
        else:
            assert repr(single_ops) == "Filter([(u'roll', 27)])"
        single_ops.update([('name', "Bob Dole")])
        assert len(single_ops.operations) == 2
        # Ordering of the merged keys is not guaranteed; accept both.
        if py3:
            assert repr(single_ops) in ("Filter([('roll', 27), ('name', 'Bob Dole')])", "Filter([('name', 'Bob Dole'), ('roll', 27)])")
        else:
            assert repr(single_ops) in ("Filter([(u'roll', 27), (u'name', u'Bob Dole')])", "Filter([(u'name', u'Bob Dole'), (u'roll', 27)])")

    def test_setdefault(self, empty_ops):
        assert len(empty_ops.operations) == 0
        empty_ops.setdefault('fnord', 42)
        assert len(empty_ops.operations) == 1
        assert empty_ops.operations['fnord'] == 42
        # A second setdefault must not overwrite the existing value.
        empty_ops.setdefault('fnord', 27)
        assert len(empty_ops.operations) == 1
        assert empty_ops.operations['fnord'] == 42

    def test_ops_shallow_copy(self, single_ops):
        assert single_ops.operations == single_ops.copy().operations
class TestOperationsCombination(object):
    """Exercise combining Filters with & (AND) and | (OR)."""

    def test_operations_and_clean_merge(self):
        # Distinct fields merge into a single flat query.
        comb = Filter({'roll': 27}) & Filter({'foo': 42})
        assert comb.as_query == {'roll': 27, 'foo': 42}

    def test_operations_and_operator_overlap(self):
        # Same field, different operators: operators merge under the field.
        comb = Filter({'roll': {'$gte': 27}}) & Filter({'roll': {'$lte': 42}})
        assert comb.as_query == {'roll': {'$gte': 27, '$lte': 42}}

    def test_paradoxical_condition(self):
        # Bare value vs. operator on the same field: bare value becomes $eq.
        comb = Filter({'roll': 27}) & Filter({'roll': {'$lte': 42}})
        assert comb.as_query == {'roll': {'$eq': 27, '$lte': 42}}

        # Operator ordering is not guaranteed; accept both orders.
        comb = Filter({'roll': {'$gte': 27}}) & Filter({'roll': 42})
        assert list(comb.as_query['roll'].items()) in ([('$gte', 27), ('$eq', 42)], [('$eq', 42), ('$gte', 27)])

    def test_operations_or_clean_merge(self):
        # OR combines whole filters into a $or list, appending on re-OR.
        comb = Filter({'roll': 27}) | Filter({'foo': 42})
        assert comb.as_query == {'$or': [{'roll': 27}, {'foo': 42}]}

        comb = comb | Filter({'bar': 'baz'})
        assert comb.as_query == {'$or': [{'roll': 27}, {'foo': 42}, {'bar': 'baz'}]}

    def test_operations_hard_and(self):
        # Explicit $and lists are concatenated.
        comb = Filter({'$and': [{'a': 1}, {'b': 2}]}) & Filter({'$and': [{'c': 3}]})
        assert comb.as_query == {'$and': [{'a': 1}, {'b': 2}, {'c': 3}]}

    def test_operations_soft_and(self):
        # A plain field ANDed with an explicit $and stays at the top level.
        comb = Filter({'$and': [{'a': 1}, {'b': 2}]}) & Filter({'c': 3})
        assert comb.as_query == {'$and': [{'a': 1}, {'b': 2}], 'c': 3}
|
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
# Optional per-call callback: (raw pipeline response, deserialized result,
# response headers) -> arbitrary caller-defined value.
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class LoadBalancersOperations:
    """LoadBalancersOperations async operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.network.v2016_09_01.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = _models

    def __init__(self, client, config, serializer, deserializer) -> None:
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    async def _delete_initial(
        self,
        resource_group_name: str,
        load_balancer_name: str,
        **kwargs: Any
    ) -> None:
        # Issues the raw DELETE request; begin_delete wraps this in an
        # LRO poller that follows the operation to completion.
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2016-09-01"

        # Construct URL
        url = self._delete_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 200/202/204 are all valid LRO-initiation responses for DELETE.
        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}'}  # type: ignore

    async def begin_delete(
        self,
        resource_group_name: str,
        load_balancer_name: str,
        **kwargs: Any
    ) -> AsyncLROPoller[None]:
        """Deletes the specified load balancer.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param load_balancer_name: The name of the load balancer.
        :type load_balancer_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling.
         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # Only issue the initial request when not resuming from a token.
        if cont_token is None:
            raw_result = await self._delete_initial(
                resource_group_name=resource_group_name,
                load_balancer_name=load_balancer_name,
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # DELETE has no body to deserialize; only honor a custom cls.
            if cls:
                return cls(pipeline_response, None, {})

        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }

        if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)

    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}'}  # type: ignore

    async def get(
        self,
        resource_group_name: str,
        load_balancer_name: str,
        expand: Optional[str] = None,
        **kwargs: Any
    ) -> "_models.LoadBalancer":
        """Gets the specified load balancer.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param load_balancer_name: The name of the load balancer.
        :type load_balancer_name: str
        :param expand: Expands referenced resources.
        :type expand: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: LoadBalancer, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2016_09_01.models.LoadBalancer
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.LoadBalancer"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2016-09-01"
        accept = "application/json, text/json"

        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        if expand is not None:
            query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('LoadBalancer', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}'}  # type: ignore

    async def _create_or_update_initial(
        self,
        resource_group_name: str,
        load_balancer_name: str,
        parameters: "_models.LoadBalancer",
        **kwargs: Any
    ) -> "_models.LoadBalancer":
        # Issues the raw PUT request; begin_create_or_update wraps this in
        # an LRO poller that follows the operation to completion.
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.LoadBalancer"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2016-09-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json, text/json"

        # Construct URL
        url = self._create_or_update_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(parameters, 'LoadBalancer')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 200 = updated, 201 = created; both carry a LoadBalancer body.
        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if response.status_code == 200:
            deserialized = self._deserialize('LoadBalancer', pipeline_response)

        if response.status_code == 201:
            deserialized = self._deserialize('LoadBalancer', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}'}  # type: ignore

    async def begin_create_or_update(
        self,
        resource_group_name: str,
        load_balancer_name: str,
        parameters: "_models.LoadBalancer",
        **kwargs: Any
    ) -> AsyncLROPoller["_models.LoadBalancer"]:
        """Creates or updates a load balancer.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param load_balancer_name: The name of the load balancer.
        :type load_balancer_name: str
        :param parameters: Parameters supplied to the create or update load balancer operation.
        :type parameters: ~azure.mgmt.network.v2016_09_01.models.LoadBalancer
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling.
         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either LoadBalancer or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2016_09_01.models.LoadBalancer]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.LoadBalancer"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # Only issue the initial request when not resuming from a token.
        if cont_token is None:
            raw_result = await self._create_or_update_initial(
                resource_group_name=resource_group_name,
                load_balancer_name=load_balancer_name,
                parameters=parameters,
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Deserialize the terminal response into a LoadBalancer model.
            deserialized = self._deserialize('LoadBalancer', pipeline_response)

            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }

        if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)

    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}'}  # type: ignore

    def list_all(
        self,
        **kwargs: Any
    ) -> AsyncIterable["_models.LoadBalancerListResult"]:
        """Gets all the load balancers in a subscription.

        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either LoadBalancerListResult or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2016_09_01.models.LoadBalancerListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.LoadBalancerListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2016-09-01"
        accept = "application/json, text/json"

        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # First page: build the URL from the operation metadata.
                # Construct URL
                url = self.list_all.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # Subsequent pages: the service supplies a complete next_link.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        async def extract_data(pipeline_response):
            deserialized = self._deserialize('LoadBalancerListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    list_all.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/loadBalancers'}  # type: ignore

    def list(
        self,
        resource_group_name: str,
        **kwargs: Any
    ) -> AsyncIterable["_models.LoadBalancerListResult"]:
        """Gets all the load balancers in a resource group.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either LoadBalancerListResult or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2016_09_01.models.LoadBalancerListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.LoadBalancerListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2016-09-01"
        accept = "application/json, text/json"

        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # First page: build the URL from the operation metadata.
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # Subsequent pages: the service supplies a complete next_link.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        async def extract_data(pipeline_response):
            deserialized = self._deserialize('LoadBalancerListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers'}  # type: ignore
|
"""Contains tests for oweb.views.updates.item_update"""
from unittest import skip
from django.core.urlresolvers import reverse
from django.test.utils import override_settings
from django.contrib.auth.models import User
from oweb.tests import OWebViewTests
from oweb.models.account import Account
from oweb.models.research import Research
from oweb.models.ship import Ship
from oweb.models.planet import Planet, Moon
from oweb.models.building import Building
from oweb.models.defense import Defense
@override_settings(AUTH_USER_MODEL='auth.User')
class OWebViewsItemUpdateTests(OWebViewTests):
    """Integration tests for the ``oweb:item_update`` view.

    Relies on fixture data loaded by ``OWebViewTests``: users ``test01``
    and ``test02`` (password ``foo``), where ``test01`` owns an account
    with researches, ships, planets, moons, buildings and defenses.
    """
    def test_login_required(self):
        """Unauthenticated users should be redirected to oweb:app_login"""
        r = self.client.get(reverse('oweb:item_update'))
        self.assertRedirects(r,
                             reverse('oweb:app_login'),
                             status_code=302,
                             target_status_code=200)
    def test_account_owner(self):
        """Can somebody update an item he doesn't possess?"""
        # test02 tries to update an item that belongs to test01's account;
        # the view must refuse with 403.
        u = User.objects.get(username='test01')
        acc = Account.objects.get(owner=u)
        res_pre = Research.objects.filter(account=acc).first()
        self.client.login(username='test02', password='foo')
        r = self.client.post(reverse('oweb:item_update'),
                             data={ 'item_type': 'research',
                                    'item_id': res_pre.id,
                                    'item_level': res_pre.level + 1 },
                             HTTP_REFERER=reverse('oweb:account_research',
                                                  args=[acc.id]))
        self.assertEqual(r.status_code, 403)
        self.assertTemplateUsed(r, 'oweb/403.html')
    def test_no_post(self):
        """What if no POST data is supplied?"""
        self.client.login(username='test01', password='foo')
        r = self.client.post(reverse('oweb:item_update'))
        self.assertEqual(r.status_code, 500)
        self.assertTemplateUsed(r, 'oweb/500.html')
    def test_research_update(self):
        """Does ``item_update()`` correctly update researches?
        Basically the Django ORM can be trusted, but since there is some logic
        involved in determine the correct field to update, this test is
        included
        """
        u = User.objects.get(username='test01')
        acc = Account.objects.get(owner=u)
        res_pre = Research.objects.filter(account=acc).first()
        self.client.login(username='test01', password='foo')
        r = self.client.post(reverse('oweb:item_update'),
                             data={ 'item_type': 'research',
                                    'item_id': res_pre.id,
                                    'item_level': res_pre.level + 1 },
                             HTTP_REFERER=reverse('oweb:account_research',
                                                  args=[acc.id]))
        self.assertRedirects(r,
                             reverse('oweb:account_research', args=[acc.id]),
                             status_code=302,
                             target_status_code=200)
        # Re-fetch from the DB to verify the persisted change.
        res_post = Research.objects.get(pk=res_pre.pk)
        self.assertEqual(res_pre.level + 1, res_post.level)
    def test_ship_update(self):
        """Does ``item_update()`` correctly update ships?
        Basically the Django ORM can be trusted, but since there is some logic
        involved in determine the correct field to update, this test is
        included
        """
        u = User.objects.get(username='test01')
        acc = Account.objects.get(owner=u)
        ship_pre = Ship.objects.filter(account=acc).first()
        self.client.login(username='test01', password='foo')
        # Ships use a 'count' field rather than 'level'; the view must pick
        # the right field for this item_type.
        r = self.client.post(reverse('oweb:item_update'),
                             data={ 'item_type': 'ship',
                                    'item_id': ship_pre.id,
                                    'item_level': ship_pre.count + 1338 },
                             HTTP_REFERER=reverse('oweb:account_ships',
                                                  args=[acc.id]))
        self.assertRedirects(r,
                             reverse('oweb:account_ships', args=[acc.id]),
                             status_code=302,
                             target_status_code=200)
        ship_post = Ship.objects.get(pk=ship_pre.pk)
        self.assertEqual(ship_pre.count + 1338, ship_post.count)
    def test_building_update(self):
        """Does ``item_update()`` correctly update buildings?
        Basically the Django ORM can be trusted, but since there is some logic
        involved in determine the correct field to update, this test is
        included
        """
        u = User.objects.get(username='test01')
        acc = Account.objects.get(owner=u)
        p = Planet.objects.filter(account=acc).first()
        b_pre = Building.objects.filter(astro_object=p).first()
        self.client.login(username='test01', password='foo')
        # Levels may also be decreased.
        r = self.client.post(reverse('oweb:item_update'),
                             data={ 'item_type': 'building',
                                    'item_id': b_pre.id,
                                    'item_level': b_pre.level - 1 },
                             HTTP_REFERER=reverse('oweb:planet_buildings',
                                                  args=[p.id]))
        self.assertRedirects(r,
                             reverse('oweb:planet_buildings', args=[p.id]),
                             status_code=302,
                             target_status_code=200)
        b_post = Building.objects.get(pk=b_pre.pk)
        self.assertEqual(b_pre.level - 1, b_post.level)
    def test_moon_building_update(self):
        """Does ``item_update()`` correctly update moon buildings?
        Basically the Django ORM can be trusted, but since there is some logic
        involved in determine the correct field to update, this test is
        included
        """
        u = User.objects.get(username='test01')
        acc = Account.objects.get(owner=u)
        p = Planet.objects.filter(account=acc).values_list('id', flat=True)
        m = Moon.objects.filter(planet__in=p).first()
        b_pre = Building.objects.filter(astro_object=m).first()
        self.client.login(username='test01', password='foo')
        r = self.client.post(reverse('oweb:item_update'),
                             data={ 'item_type': 'moon_building',
                                    'item_id': b_pre.id,
                                    'item_level': b_pre.level + 2 },
                             HTTP_REFERER=reverse('oweb:moon_buildings',
                                                  args=[m.id]))
        self.assertRedirects(r,
                             reverse('oweb:moon_buildings', args=[m.id]),
                             status_code=302,
                             target_status_code=200)
        b_post = Building.objects.get(pk=b_pre.pk)
        self.assertEqual(b_pre.level + 2, b_post.level)
    def test_defense_update(self):
        """Does ``item_update()`` correctly update defense devices?
        Basically the Django ORM can be trusted, but since there is some logic
        involved in determine the correct field to update, this test is
        included
        """
        u = User.objects.get(username='test01')
        acc = Account.objects.get(owner=u)
        p = Planet.objects.filter(account=acc).first()
        d_pre = Defense.objects.filter(astro_object=p).first()
        self.client.login(username='test01', password='foo')
        r = self.client.post(reverse('oweb:item_update'),
                             data={ 'item_type': 'defense',
                                    'item_id': d_pre.id,
                                    'item_level': d_pre.count - 1 },
                             HTTP_REFERER=reverse('oweb:planet_defense',
                                                  args=[p.id]))
        self.assertRedirects(r,
                             reverse('oweb:planet_defense', args=[p.id]),
                             status_code=302,
                             target_status_code=200)
        d_post = Defense.objects.get(pk=d_pre.pk)
        self.assertEqual(d_pre.count - 1, d_post.count)
    def test_moon_defense_update(self):
        """Does ``item_update()`` correctly update moon defense devices?
        Basically the Django ORM can be trusted, but since there is some logic
        involved in determine the correct field to update, this test is
        included
        """
        u = User.objects.get(username='test01')
        acc = Account.objects.get(owner=u)
        p = Planet.objects.filter(account=acc).values_list('id', flat=True)
        m = Moon.objects.filter(planet__in=p).first()
        d_pre = Defense.objects.filter(astro_object=m).first()
        self.client.login(username='test01', password='foo')
        # item_level is pushed far below zero here; per the assertion below
        # the view is expected to clamp the stored count at 0.
        r = self.client.post(reverse('oweb:item_update'),
                             data={ 'item_type': 'moon_defense',
                                    'item_id': d_pre.id,
                                    'item_level': d_pre.count - 10000 },
                             HTTP_REFERER=reverse('oweb:moon_defense',
                                                  args=[m.id]))
        self.assertRedirects(r,
                             reverse('oweb:moon_defense', args=[m.id]),
                             status_code=302,
                             target_status_code=200)
        d_post = Defense.objects.get(pk=d_pre.pk)
        self.assertEqual(0, d_post.count)
    def test_unknown_item_type(self):
        """Does ``item_update()`` correctly handle unknown item_types?"""
        self.client.login(username='test01', password='foo')
        r = self.client.post(reverse('oweb:item_update'),
                             data={
                                 'item_type': 'foobar',
                                 'item_id': 1,
                                 'item_level': 1
                             })
        self.assertEqual(r.status_code, 500)
        self.assertTemplateUsed(r, 'oweb/500.html')
|
import os
import sys
# Script entry point: run Django's command-line utility, defaulting to the
# production settings module unless DJANGO_SETTINGS_MODULE is already set
# in the environment.
if __name__ == '__main__':
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'settings.prod')
    from django.core.management import execute_from_command_line
    argv = sys.argv
    execute_from_command_line(argv)
|
import os
import sys
import pygame
import signal
import time
import ConfigParser
from twython import TwythonStreamer
path = os.path.join(os.path.dirname(__file__), 'py_apps/pyscope')
sys.path.append(path)
path = os.path.join(os.path.dirname(__file__), '../py_apps/twit_feed')
sys.path.append(path)
import pyscope
import twit_feed
# Maximum number of contestants accepted/shown on the starting screen.
MAX_ENTRIES = 1
# Frame rate of the pygame main loop.
FPS = 5
# Terms the Twitter streamer tracks to collect entries.
BET_TERM = ['#testing', '#blargz'] #['@Gr8AmTweetRace']
# SECURITY NOTE(review): Twitter OAuth credentials are hard-coded in source.
# They should be loaded from a config file or environment variables, and the
# values below revoked/rotated.
AUTH = { 'app_key': 'li8wn8Tb7xBifCnNIgyqUw',
         'app_secret': 'vcwq36w4C4VXamlqWBDKM2E8etsOoangDoMhxNDU',
         'oauth_token': '1969690717-rGw3VkRQ8IyL4OcPWtv5Y2CeBdVn8ndJrjGKraI',
         'oauth_token_secret': 'KO7YIFMKWKaYTtz2zEyaSy044ixj5kIbWrDtZZL96ly0H'}
# RGB color tuples used by the drawing code.
WHITE = 255,255,255
GREEN = 0,255,0
BLACK = 0,0,0
BLUE = 0,0,255
RED = 255,0,0
# Collected entries ('#term' strings) from the stream.
g_terms = []
# Main-loop flag for the starting-gate screen; see main().
g_bet_loop = None
# pyscope display wrapper; initialized (currently commented out) in main().
g_scope = None
def draw_starting_screen():
    """Render the 'Starting Gate' screen: title, game mode, instructions
    and the list of contestants collected so far in ``g_terms``.

    Draws onto the global ``g_scope`` display surface; the caller is
    responsible for calling ``pygame.display.update()`` afterwards.
    """
    global g_terms
    global g_scope
    # Create fonts
    font_mode = pygame.font.Font(None, 68)
    font_title_1 = pygame.font.Font(None, 68)
    font_title_2 = pygame.font.Font(None, 68)
    font_instr_1 = pygame.font.Font(None, 36)
    font_instr_2 = pygame.font.Font(None, 36)
    font_ent_title = pygame.font.Font(None, 36)
    font_ent = pygame.font.Font(None, 36)
    # Background and white panels.  The Rect return values were never used,
    # so the original rect_* bindings (unused locals) are dropped.
    pygame.draw.rect(g_scope.screen, BLACK, \
        (0, 0, 540, 960), 0)    # background
    pygame.draw.rect(g_scope.screen, WHITE, \
        (20, 20, 500, 100), 0)  # title panel
    pygame.draw.rect(g_scope.screen, WHITE, \
        (20, 140, 500, 60), 0)  # game-mode panel
    pygame.draw.rect(g_scope.screen, WHITE, \
        (20, 220, 500, 100), 0) # instructions panel
    pygame.draw.rect(g_scope.screen, WHITE, \
        (20, 340, 500, 300), 0) # contestants panel
    # Draw title
    title1 = "The Great American"
    title2 = "Tweet Race"
    text_title_1 = font_title_1.render(title1,1,BLACK)
    text_title_2 = font_title_2.render(title2,1,BLACK)
    g_scope.screen.blit(text_title_1, (40, 25))
    g_scope.screen.blit(text_title_2, (130, 70))
    # Draw game mode
    mode_str = font_mode.render('Starting Gate',1,BLACK)
    g_scope.screen.blit(mode_str, (115, 140))
    # Draw instructions
    instr_str_1 = 'Send a tweet to @Gr8AmTweetRace'
    instr_str_2 = 'with a #term to enter!'
    instr_1 = font_instr_1.render(instr_str_1,1,BLACK)
    instr_2 = font_instr_2.render(instr_str_2,1,BLACK)
    g_scope.screen.blit(instr_1, (40, 240))
    g_scope.screen.blit(instr_2, (40, 270))
    # Draw entrants: one numbered slot per possible entry, filled with a
    # collected term when available.
    ent_title = font_ent_title.render('Contestants',1,BLACK)
    g_scope.screen.blit(ent_title, (40, 360))
    ent_y = 390  # y coordinate of the first contestant row
    for i in range(0, MAX_ENTRIES):
        ent_str = ''.join([str(i + 1), ': '])
        if i < len(g_terms):
            ent_str = ''.join([ent_str, g_terms[i]])
        ent_disp = font_ent.render(ent_str,1,BLACK)
        # Use ent_y instead of repeating the magic literal 390 (the
        # original assigned ent_y but never used it).
        g_scope.screen.blit(ent_disp, (40, ent_y + (i * 30)))
def is_in_terms(entry):
    """Return True if '#<entry>' was already collected in ``g_terms``.

    ``entry`` is the bare term without the leading '#'; ``g_terms`` stores
    terms with the '#' prefix.
    """
    global g_terms
    # Idiom: a direct membership test replaces the original manual loop.
    return ('#' + entry) in g_terms
def main():
    """Run the 'starting gate' phase: stream tweets matching BET_TERM and
    collect up to MAX_ENTRIES contestant terms, refreshing the screen at
    FPS until enough entries arrive or the user quits ('q'/ESC/close).
    """
    global g_bet_loop
    global g_scope
    global g_terms
    # Setup Twitter streamer
    tf = twit_feed.TwitFeed(AUTH)
    #tf = tf_test_02.TwitFeed(AUTH)
    # Tweet that we are accepting bets
    # Start streamer to search for terms
    tf.start_track_streamer(BET_TERM)
    # Setup display
    pygame.init()
    # NOTE(review): the display wrapper is never initialized here, so
    # g_scope stays None and draw_starting_screen() would fail with an
    # AttributeError if the loop below ever ran -- confirm intent.
    #g_scope = pyscope.pyscope()
    fps_clock = pygame.time.Clock()
    pygame.mouse.set_visible(False)
    # Main game loop
    # NOTE(review): g_bet_loop starts as False, so this while loop body
    # never executes; presumably it should start as True (likely disabled
    # for debugging together with the g_scope init above) -- confirm.
    g_bet_loop = False
    while g_bet_loop:
        # Handle game events
        for event in pygame.event.get():
            # End game if quit event raises
            if event.type == pygame.QUIT:
                g_bet_loop = False
            # End game if 'q' or 'esc' key pressed
            elif event.type == pygame.KEYDOWN:
                if event.key == pygame.K_q or event.key == pygame.K_ESCAPE:
                    g_bet_loop = False
        # Get entries and print them
        entries = tf.get_entries()
        for entry in entries:
            print entry
            # Deduplicate before adding the '#'-prefixed term.
            if is_in_terms(entry) == False:
                g_terms.append(''.join(['#', entry]))
                print len(g_terms)
            # Stop collecting once the roster is full.
            if len(g_terms) >= MAX_ENTRIES:
                print 'breaking'
                g_bet_loop = False
        # Update screen
        draw_starting_screen()
        pygame.display.update()
        fps_clock.tick(FPS)
    # Clean up Twitter feed and pygame
    print str(pygame.time.get_ticks())
    tf.stop_tracking()
    print str(pygame.time.get_ticks())
    pygame.quit()
    # Print terms
    print 'Search terms: ', g_terms
main()
|
import asyncio
from abc import ABCMeta
from collections.abc import MutableMapping
from aiohttp import web
from aiohttp.web_request import Request
from aiohttp_session import get_session
from collections.abc import Sequence
# Session key under which the serialized user profile is stored.
AIOLOGIN_KEY = '__aiologin__'
# Signal codes accepted by AioLogin(signals=[(code, coroutine), ...]).
ON_LOGIN = 1
ON_LOGOUT = 2
ON_AUTHENTICATED = 3
ON_FORBIDDEN = 4
ON_UNAUTHORIZED = 5
class AbstractUser(MutableMapping, metaclass=ABCMeta):
    """Base class for aiologin users.

    Behaves as a mutable mapping over the instance's own attributes, so a
    user can be serialized into the session via ``dict(user)``.  Subclasses
    must override :attr:`authenticated` and :attr:`forbidden`.
    """
    def __iter__(self):
        return self.__dict__.__iter__()
    def __len__(self):
        return len(self.__dict__)
    def __getitem__(self, key):
        # Mapping access is backed by instance attributes.
        return getattr(self, key)
    def __setitem__(self, key, value):
        setattr(self, key, value)
    def __delitem__(self, key):
        delattr(self, key)
    @property
    def authenticated(self):
        """Whether this user successfully authenticated (subclass hook)."""
        # Bug fix: the original did `raise NotImplemented()`, which raises a
        # confusing TypeError ('NotImplementedType' object is not callable).
        raise NotImplementedError()
    @property
    def forbidden(self):
        """Whether this user is denied access regardless of authentication."""
        raise NotImplementedError()
class AnonymousUser(AbstractUser):
    """Fallback user attached to a request when no authentication succeeds.

    Never authenticated and never forbidden, so protected handlers fall
    through to the 'unauthorized' handling in ``secured``.
    """
    @property
    def authenticated(self):
        # An anonymous visitor is never considered logged in.
        return False
    @property
    def forbidden(self):
        # Anonymous users are not explicitly banned.
        return False
@asyncio.coroutine
def _unauthorized(*args, **kwargs):
    """Default 'unauthorized' handler: respond 401 to the client."""
    raise web.HTTPUnauthorized()
@asyncio.coroutine
def _forbidden(*args, **kwargs):
    """Default 'forbidden' handler: respond 403 to the client."""
    raise web.HTTPForbidden()
@asyncio.coroutine
def _void(*args, **kwargs):
    """Placeholder for auth callbacks the application did not configure.

    Bug fix: the original did `raise NotImplemented()`, which raises a
    TypeError ('NotImplementedType' object is not callable) instead of the
    intended NotImplementedError.
    """
    raise NotImplementedError()
class AioLogin:
    """Per-request login manager attached as ``request.aiologin``.

    Holds the configured authentication callbacks (form/header/session),
    the error handlers, and lists of signal coroutines fired on login,
    logout, authentication, forbidden and unauthorized events.
    """
    def __init__(self, request, session_name=AIOLOGIN_KEY, disabled=False,
                 auth_by_form=_void, auth_by_header=_void,
                 auth_by_session=_void, forbidden=_forbidden,
                 unauthorized=_unauthorized, anonymous_user=AnonymousUser,
                 session=get_session, signals=None):
        # request: the aiohttp request this manager serves.
        # session: coroutine returning the session for a request
        #          (defaults to aiohttp_session.get_session).
        # signals: optional sequence of (code, coroutine) pairs; codes are
        #          the ON_* constants defined at module level.
        self._request = request
        self._disabled = disabled
        self._session_name = session_name
        self._anonymous_user = anonymous_user
        self._session = session
        self._auth_by_form = auth_by_form
        self._auth_by_header = auth_by_header
        self._auth_by_session = auth_by_session
        self._unauthorized = unauthorized
        self._forbidden = forbidden
        self._on_login = []
        self._on_logout = []
        self._on_authenticated = []
        self._on_forbidden = []
        self._on_unauthorized = []
        # NOTE(review): "Excepted" in these assertion messages is likely a
        # typo for "Expected" (runtime strings, left unchanged here).
        assert isinstance(signals, (type(None), Sequence)), \
            "Excepted {!r} but received {!r}".format(Sequence, signals)
        signals = [] if signals is None else signals
        for sig in signals:
            assert isinstance(sig, Sequence), \
                "Excepted {!r} but received {!r}".format(Sequence, signals)
            is_coro = asyncio.iscoroutinefunction(sig[1])
            # NOTE(review): the upper bound 7 looks off -- only signal codes
            # 1..5 are defined and dispatched below; confirm.
            assert len(sig) == 2 and 1 <= sig[0] <= 7 and is_coro, \
                "Incorrectly formatted signal argument {}".format(sig)
            # Dispatch each signal coroutine into its per-event list.
            if sig[0] == 1:
                self._on_login.append(sig[1])
            elif sig[0] == 2:
                self._on_logout.append(sig[1])
            elif sig[0] == 3:
                self._on_authenticated.append(sig[1])
            elif sig[0] == 4:
                self._on_forbidden.append(sig[1])
            elif sig[0] == 5:
                self._on_unauthorized.append(sig[1])
    @asyncio.coroutine
    def authenticate(self, *args, remember=False, **kwargs):
        """Authenticate via the form callback; 401 (after ON_UNAUTHORIZED
        signals) when it returns None, otherwise fire ON_AUTHENTICATED
        signals and log the user in."""
        assert isinstance(remember, bool), \
            "Expected {!r} but received {!r}".format(type(bool), type(remember))
        user = yield from self._auth_by_form(self._request, *args, **kwargs)
        if user is None:
            for coro in self._on_unauthorized:
                yield from coro(self._request)
            raise web.HTTPUnauthorized
        for coro in self._on_authenticated:
            yield from coro(self._request)
        yield from self.login(user, remember=remember)
    @asyncio.coroutine
    def login(self, user, remember):
        """Store *user* (as a dict) in the session and fire ON_LOGIN signals."""
        assert isinstance(user, AbstractUser), \
            "Expected {} but received {}".format(type(AbstractUser), type(user))
        assert isinstance(remember, bool), \
            "Expected {!r} but received {!r}".format(type(bool), type(remember))
        session = yield from self._session(self._request)
        # Prefer an attribute on session implementations that support it,
        # falling back to a reserved key.
        # NOTE(review): the bare `except:` swallows every error (including
        # KeyboardInterrupt); consider narrowing to AttributeError.
        try:
            session.remember = remember
        except:
            session['_remember'] = remember
        session[self._session_name] = dict(user)
        for coro in self._on_login:
            yield from coro(self._request)
    @asyncio.coroutine
    def logout(self):
        """Invalidate the session and fire ON_LOGOUT signals."""
        session = yield from self._session(self._request)
        session.invalidate()
        for coro in self._on_logout:
            yield from coro(self._request)
    @asyncio.coroutine
    def auth_by_header(self):
        """Authenticate from the AUTHORIZATION header, or None if absent."""
        key = self._request.headers.get('AUTHORIZATION', None)
        if key is None:
            return None
        return (yield from self._auth_by_header(self._request, key))
    @asyncio.coroutine
    def auth_by_session(self):
        """Authenticate from the stored session profile, or None."""
        session = yield from self._session(self._request)
        profile = session.get(self._session_name, None)
        if profile is None:
            return None
        user = yield from self._auth_by_session(self._request, profile)
        if user is None:
            return None
        return user
    @property
    def on_login(self):
        # Registered ON_LOGIN signal coroutines.
        return self._on_login
    @property
    def disabled(self):
        # True when authentication checks are bypassed entirely.
        return self._disabled
    @property
    def unauthorized(self):
        # Handler invoked for unauthenticated users.
        return self._unauthorized
    @property
    def forbidden(self):
        # Handler invoked for forbidden users.
        return self._forbidden
    @property
    def anonymous_user(self):
        # Factory for the fallback user when no auth succeeds.
        return self._anonymous_user
def setup(app, **kwargs):
    """Install the aiologin middleware on the given aiohttp application.

    All keyword arguments are forwarded to ``middleware_factory`` and from
    there to the per-request login manager.
    """
    factory = middleware_factory(**kwargs)
    app.middlewares.append(factory)
def middleware_factory(**options):
    """Build an aiohttp middleware that attaches a login manager to every
    incoming request as ``request.aiologin``.

    ``options['manager']`` may name a custom AioLogin subclass; all options
    are forwarded to that manager's constructor.
    """
    # noinspection PyUnusedLocal
    @asyncio.coroutine
    def aiologin_middleware(app, handler):
        @asyncio.coroutine
        def aiologin_handler(*args, **kwargs):
            # The request may arrive positionally or as a keyword argument.
            if 'request' in kwargs:
                request = kwargs['request']
            else:
                request = args[0]
            kwargs = {key: val for (key, val) in kwargs.items()
                      if key != 'request'}
            # noinspection PyTypeChecker
            manager = options.get('manager', AioLogin)
            request.aiologin = manager(request=request, **options)
            return (yield from handler(request=request, **kwargs))
        return aiologin_handler
    return aiologin_middleware
def secured(func):
    """Decorator that authenticates the request before calling *func*.

    Resolution order: header auth, then session auth, then the configured
    anonymous user.  Unauthenticated users go to the `unauthorized` handler
    (after ON_UNAUTHORIZED signals), forbidden users to the `forbidden`
    handler (after ON_FORBIDDEN signals); otherwise the user is stored on
    ``request.current_user``, ON_AUTHENTICATED signals fire, and the
    wrapped handler runs.  When aiologin is disabled the check is skipped.
    """
    @asyncio.coroutine
    def wrapper(*args, **kwargs):
        # The request may be passed positionally, by keyword, or (for
        # class-based views) as an attribute of the first argument.
        request = kwargs['request'] if 'request' in kwargs else args[0]
        kwargs = {k: v for (k, v) in kwargs.items() if k != 'request'}
        if not isinstance(request, Request):
            request = args[0].request
        elif request not in args:
            args = (request,) + args
        if request.aiologin.disabled:
            return (yield from func(*args, **kwargs))
        # Try header-based auth first, then fall back to the session.
        user = yield from request.aiologin.auth_by_header()
        if user is None:
            user = yield from request.aiologin.auth_by_session()
        if user is None:
            user = request.aiologin.anonymous_user()
        # NOTE(review): "by got" in this message is likely a typo for
        # "but got" (runtime string, left unchanged here).
        assert isinstance(user, AbstractUser), \
            "Expected 'user' of type AbstractUser by got {}".format(type(user))
        if not user.authenticated:
            # noinspection PyProtectedMember
            for coro in request.aiologin._on_unauthorized:
                yield from coro(request)
            return (yield from request.aiologin.unauthorized(*args, **kwargs))
        if user.forbidden:
            # noinspection PyProtectedMember
            for coro in request.aiologin._on_forbidden:
                yield from coro(request)
            return (yield from request.aiologin.forbidden(*args, **kwargs))
        request.current_user = user
        # noinspection PyProtectedMember
        for coro in request.aiologin._on_authenticated:
            yield from coro(request)
        return (yield from func(*args, **kwargs))
    return wrapper
|
import inspect
import sys
from typing import TypeVar, Optional, Sequence, Iterable, List, Any
from owlmixin import util
from owlmixin.errors import RequiredError, UnknownPropertiesError, InvalidTypeError
from owlmixin.owlcollections import TDict, TIterator, TList
from owlmixin.owlenum import OwlEnum, OwlObjectEnum
from owlmixin.transformers import (
DictTransformer,
JsonTransformer,
YamlTransformer,
ValueTransformer,
traverse_dict,
TOption,
)
# Type variable for the concrete OwlMixin subclass returned by the
# from_* factory classmethods.
T = TypeVar("T", bound="OwlMixin")
def _is_generic(type_):
return hasattr(type_, "__origin__")
def assert_extra(cls_properties, arg_dict, cls):
    """Raise UnknownPropertiesError when *arg_dict* contains keys that are
    not declared in *cls_properties* (an iterable of (name, type) pairs).
    """
    declared_names = {prop_name for prop_name, _ in cls_properties}
    extra_keys: set = set(arg_dict) - declared_names
    if extra_keys:
        raise UnknownPropertiesError(cls=cls, props=sorted(extra_keys))
def assert_none(value, type_, cls, name):
    """Raise RequiredError when a required property *name* is missing (None)."""
    if value is not None:
        return
    raise RequiredError(cls=cls, prop=name, type_=type_)
def assert_types(value, types: tuple, cls, name):
    """Raise InvalidTypeError unless *value* is an instance of one of *types*."""
    if isinstance(value, types):
        return
    raise InvalidTypeError(cls=cls, prop=name, value=value, expected=types, actual=type(value))
def traverse(
    type_, name, value, cls, force_snake_case: bool, force_cast: bool, restrict: bool
) -> Any:
    """Convert *value* into an instance of annotation *type_*, recursing
    into the owlmixin generic containers (TList/TIterator/TDict/TOption).

    *name* is the dotted property path used in error messages; *cls* is
    the class whose module namespace resolves string/forward-reference
    annotations.  Raises RequiredError / InvalidTypeError on bad input.
    """
    # pylint: disable=too-many-return-statements,too-many-branches,too-many-arguments
    # A string annotation is resolved against the defining module's namespace.
    if isinstance(type_, str):
        type_ = sys.modules[cls.__module__].__dict__.get(type_)
    if hasattr(type_, "__forward_arg__"):
        # `_ForwardRef` (3.6) or `ForwardRef` (>= 3.7) includes __forward_arg__
        # PEP 563 -- Postponed Evaluation of Annotations
        type_ = sys.modules[cls.__module__].__dict__.get(type_.__forward_arg__)
    if not _is_generic(type_):
        # Non-generic annotation: value is required (None is rejected).
        assert_none(value, type_, cls, name)
        # NOTE(review): also tolerates the *builtin* `any` used as an
        # annotation, treating it like typing.Any.
        if type_ is any:
            return value
        if type_ is Any:
            return value
        if isinstance(value, type_):
            return value
        if issubclass(type_, OwlMixin):
            # Nested owl objects may be given as dicts.
            assert_types(value, (type_, dict), cls, name)
            return type_.from_dict(
                value, force_snake_case=force_snake_case, force_cast=force_cast, restrict=restrict
            )
        if issubclass(type_, ValueTransformer):
            return type_.from_value(value)
        if force_cast:
            # Best-effort cast, e.g. "1" -> 1 when force_cast=True.
            return type_(value)
        assert_types(value, (type_,), cls, name)
        return value
    # Generic annotation: split into container origin and type arguments.
    o_type = type_.__origin__
    g_type = type_.__args__
    if o_type == TList:
        assert_none(value, type_, cls, name)
        assert_types(value, (list,), cls, name)
        return TList(
            [
                traverse(g_type[0], f"{name}.{i}", v, cls, force_snake_case, force_cast, restrict)
                for i, v in enumerate(value)
            ]
        )
    if o_type == TIterator:
        assert_none(value, type_, cls, name)
        assert_types(value, (Iterable,), cls, name)
        # Lazy: elements are converted as the iterator is consumed.
        return TIterator(
            traverse(g_type[0], f"{name}.{i}", v, cls, force_snake_case, force_cast, restrict)
            for i, v in enumerate(value)
        )
    if o_type == TDict:
        assert_none(value, type_, cls, name)
        assert_types(value, (dict,), cls, name)
        return TDict(
            {
                k: traverse(
                    g_type[0], f"{name}.{k}", v, cls, force_snake_case, force_cast, restrict
                )
                for k, v in value.items()
            }
        )
    if o_type == TOption:
        # Unwrap an already-wrapped TOption before re-wrapping.
        v = value.get() if isinstance(value, TOption) else value
        # TODO: Fot `from_csvf`... need to more simple!!
        # Empty strings count as "absent" (CSV sources have no real None).
        if (isinstance(v, str) and v) or (not isinstance(v, str) and v is not None):
            return TOption(
                traverse(g_type[0], name, v, cls, force_snake_case, force_cast, restrict)
            )
        return TOption(None)
    raise RuntimeError(f"This generics is not supported `{o_type}`")
class OwlMeta(type):
    """Metaclass that caches each class's bound methods in
    ``__methods_dict__`` (name -> method), used by ``OwlMixin.from_dict``
    to look up per-property converter hooks.
    """
    def __new__(cls, name, bases, class_dict):
        new_cls = super().__new__(cls, name, bases, class_dict)
        members = inspect.getmembers(new_cls, inspect.ismethod)
        new_cls.__methods_dict__ = dict(members)
        return new_cls
class OwlMixin(DictTransformer, JsonTransformer, YamlTransformer, metaclass=OwlMeta):
    @classmethod
    def from_dict(
        cls,
        d: dict,
        *,
        force_snake_case: bool = True,
        force_cast: bool = False,
        restrict: bool = True,
    ) -> T:
        """From dict to instance
        :param d: Dict
        :param force_snake_case: Keys are transformed to snake case in order to compliant PEP8 if True
        :param force_cast: Cast forcibly if True
        :param restrict: Prohibit extra parameters if True
        :return: Instance
        Usage:
            >>> from owlmixin.samples import Human, Food, Japanese
            >>> human: Human = Human.from_dict({
            ...     "id": 1,
            ...     "name": "Tom",
            ...     "favorites": [
            ...         {"name": "Apple", "names_by_lang": {"en": "Apple", "de": "Apfel"}},
            ...         {"name": "Orange"}
            ...     ]
            ... })
            >>> human.id
            1
            >>> human.name
            'Tom'
            >>> human.favorites[0].name
            'Apple'
            >>> human.favorites[0].names_by_lang.get()["de"]
            'Apfel'
        You can use default value
            >>> taro: Japanese = Japanese.from_dict({
            ...     "name": 'taro'
            ... })  # doctest: +NORMALIZE_WHITESPACE
            >>> taro.name
            'taro'
            >>> taro.language
            'japanese'
        If you don't set `force_snake=False` explicitly, keys are transformed to snake case as following.
            >>> human: Human = Human.from_dict({
            ...     "--id": 1,
            ...     "<name>": "Tom",
            ...     "favorites": [
            ...         {"name": "Apple", "namesByLang": {"en": "Apple"}}
            ...     ]
            ... })
            >>> human.id
            1
            >>> human.name
            'Tom'
            >>> human.favorites[0].names_by_lang.get()["en"]
            'Apple'
        You can allow extra parameters (like ``hogehoge``) if you set `restrict=False`.
            >>> apple: Food = Food.from_dict({
            ...     "name": "Apple",
            ...     "hogehoge": "ooooooooooooooooooooo",
            ... }, restrict=False)
            >>> apple.to_dict()
            {'name': 'Apple'}
        You can prohibit extra parameters (like ``hogehoge``) if you set `restrict=True` (which is default).
            >>> human = Human.from_dict({
            ...     "id": 1,
            ...     "name": "Tom",
            ...     "hogehoge1": "ooooooooooooooooooooo",
            ...     "hogehoge2": ["aaaaaaaaaaaaaaaaaa", "iiiiiiiiiiiiiiiii"],
            ...     "favorites": [
            ...         {"name": "Apple", "namesByLang": {"en": "Apple", "de": "Apfel"}},
            ...         {"name": "Orange"}
            ...     ]
            ... })  # doctest: +NORMALIZE_WHITESPACE
            Traceback (most recent call last):
                ...
            owlmixin.errors.UnknownPropertiesError:
            .        ∧,,_∧      ,___________________
            ⊂ ( ・ω・ )つ-  <  Unknown properties error
            ///   /::/     `-------------------
            |::|/⊂ヽノ|::|」
            / ̄ ̄旦 ̄ ̄ ̄/|
            ______/  | |
            |------ー----ー|/
            <BLANKLINE>
            `owlmixin.samples.Human` has unknown properties ['hogehoge1', 'hogehoge2']!!
            <BLANKLINE>
            * If you want to allow unknown properties, set `restrict=False`
            * If you want to disallow unknown properties, add `hogehoge1` and `hogehoge2` to owlmixin.samples.Human
            <BLANKLINE>
        If you specify wrong type...
            >>> human: Human = Human.from_dict({
            ...     "id": 1,
            ...     "name": "ichiro",
            ...     "favorites": ["apple", "orange"]
            ... })  # doctest: +NORMALIZE_WHITESPACE
            Traceback (most recent call last):
                ...
            owlmixin.errors.InvalidTypeError:
            .        ∧,,_∧      ,___________________
            ⊂ ( ・ω・ )つ-  <  Invalid Type error
            ///   /::/     `-------------------
            |::|/⊂ヽノ|::|」
            / ̄ ̄旦 ̄ ̄ ̄/|
            ______/  | |
            |------ー----ー|/
            <BLANKLINE>
            `owlmixin.samples.Human#favorites.0 = apple` doesn't match expected types.
            Expected type is one of ["<class 'owlmixin.samples.Food'>", "<class 'dict'>"], but actual type is `<class 'str'>`
            <BLANKLINE>
            * If you want to force cast, set `force_cast=True`
            * If you don't want to force cast, specify value which has correct type
            <BLANKLINE>
        If you don't specify required params... (ex. name
            >>> human: Human = Human.from_dict({
            ...     "id": 1
            ... })  # doctest: +NORMALIZE_WHITESPACE
            Traceback (most recent call last):
                ...
            owlmixin.errors.RequiredError:
            .        ∧,,_∧      ,___________________
            ⊂ ( ・ω・ )つ-  <  Required error
            ///   /::/     `-------------------
            |::|/⊂ヽノ|::|」
            / ̄ ̄旦 ̄ ̄ ̄/|
            ______/  | |
            |------ー----ー|/
            <BLANKLINE>
            `owlmixin.samples.Human#name: <class 'str'>` is empty!!
            <BLANKLINE>
            * If `name` is certainly required, specify anything.
            * If `name` is optional, change type from `<class 'str'>` to `TOption[<class 'str'>]`
            <BLANKLINE>
        """
        # Pass-through: the input may already be an instance of this class.
        if isinstance(d, cls):
            return d
        instance: T = cls()  # type: ignore
        # Guard against the reserved attribute name `self`.
        d = util.replace_keys(d, {"self": "_self"}, force_snake_case)
        properties = cls.__annotations__.items()
        if restrict:
            assert_extra(properties, d, cls)
        for n, t in properties:
            # Optional per-property converter hook: a method named
            # `_<ClassName>___<prop>` preprocesses the raw value.
            f = cls.__methods_dict__.get(f"_{cls.__name__}___{n}")  # type: ignore
            arg_v = f(d.get(n)) if f else d.get(n)
            # Class-level default (if any) is used when the key is absent.
            def_v = getattr(instance, n, None)
            setattr(
                instance,
                n,
                traverse(
                    type_=t,
                    name=n,
                    value=def_v if arg_v is None else arg_v,
                    cls=cls,
                    force_snake_case=force_snake_case,
                    force_cast=force_cast,
                    restrict=restrict,
                ),
            )
        return instance
@classmethod
def from_optional_dict(
cls,
d: Optional[dict],
*,
force_snake_case: bool = True,
force_cast: bool = False,
restrict: bool = True,
) -> TOption[T]:
"""From dict to optional instance.
:param d: Dict
:param force_snake_case: Keys are transformed to snake case in order to compliant PEP8 if True
:param force_cast: Cast forcibly if True
:param restrict: Prohibit extra parameters if True
:return: Instance
Usage:
>>> from owlmixin.samples import Human
>>> Human.from_optional_dict(None).is_none()
True
>>> Human.from_optional_dict({}).get() # doctest: +NORMALIZE_WHITESPACE
Traceback (most recent call last):
...
owlmixin.errors.RequiredError:
. ∧,,_∧ ,___________________
⊂ ( ・ω・ )つ- < Required error
/// /::/ `-------------------
|::|/⊂ヽノ|::|」
/ ̄ ̄旦 ̄ ̄ ̄/|
______/ | |
|------ー----ー|/
<BLANKLINE>
`owlmixin.samples.Human#id: <class 'int'>` is empty!!
<BLANKLINE>
* If `id` is certainly required, specify anything.
* If `id` is optional, change type from `<class 'int'>` to `TOption[<class 'int'>]`
<BLANKLINE>
"""
return TOption(
cls.from_dict(
d, force_snake_case=force_snake_case, force_cast=force_cast, restrict=restrict
)
if d is not None
else None
)
@classmethod
def from_dicts(
cls,
ds: List[dict],
*,
force_snake_case: bool = True,
force_cast: bool = False,
restrict: bool = True,
) -> TList[T]:
"""From list of dict to list of instance
:param ds: List of dict
:param force_snake_case: Keys are transformed to snake case in order to compliant PEP8 if True
:param force_cast: Cast forcibly if True
:param restrict: Prohibit extra parameters if True
:return: List of instance
Usage:
>>> from owlmixin.samples import Human
>>> humans: TList[Human] = Human.from_dicts([
... {"id": 1, "name": "Tom", "favorites": [{"name": "Apple"}]},
... {"id": 2, "name": "John", "favorites": [{"name": "Orange"}]}
... ])
>>> humans[0].name
'Tom'
>>> humans[1].name
'John'
"""
return TList(
[
cls.from_dict(
d, force_snake_case=force_snake_case, force_cast=force_cast, restrict=restrict
)
for d in ds
]
)
@classmethod
def from_iterable_dicts(
cls,
ds: Iterable[dict],
*,
force_snake_case: bool = True,
force_cast: bool = False,
restrict: bool = True,
) -> TIterator[T]:
"""From iterable dict to iterable instance
:param ds: Iterable dict
:param force_snake_case: Keys are transformed to snake case in order to compliant PEP8 if True
:param force_cast: Cast forcibly if True
:param restrict: Prohibit extra parameters if True
:return: Iterator
Usage:
>>> from owlmixin.samples import Human
>>> humans: TIterator[Human] = Human.from_iterable_dicts([
... {"id": 1, "name": "Tom", "favorites": [{"name": "Apple"}]},
... {"id": 2, "name": "John", "favorites": [{"name": "Orange"}]}
... ])
>>> humans.next_at(0).get().name
'Tom'
>>> humans.next_at(0).get().name
'John'
"""
return TIterator(
cls.from_dict(
d, force_snake_case=force_snake_case, force_cast=force_cast, restrict=restrict
)
for d in ds
)
@classmethod
def from_optional_dicts(
cls,
ds: Optional[List[dict]],
*,
force_snake_case: bool = True,
force_cast: bool = False,
restrict: bool = True,
) -> TOption[TList[T]]:
"""From list of dict to optional list of instance.
:param ds: List of dict
:param force_snake_case: Keys are transformed to snake case in order to compliant PEP8 if True
:param force_cast: Cast forcibly if True
:param restrict: Prohibit extra parameters if True
:return: List of instance
Usage:
>>> from owlmixin.samples import Human
>>> Human.from_optional_dicts(None).is_none()
True
>>> Human.from_optional_dicts([]).get()
[]
"""
return TOption(
cls.from_dicts(
ds, force_snake_case=force_snake_case, force_cast=force_cast, restrict=restrict
)
if ds is not None
else None
)
@classmethod
def from_optional_iterable_dicts(
cls,
ds: Optional[Iterable[dict]],
*,
force_snake_case: bool = True,
force_cast: bool = False,
restrict: bool = True,
) -> TOption[TIterator[T]]:
"""From iterable dict to optional iterable instance.
:param ds: Iterable dict
:param force_snake_case: Keys are transformed to snake case in order to compliant PEP8 if True
:param force_cast: Cast forcibly if True
:param restrict: Prohibit extra parameters if True
:return: Iterable instance
Usage:
>>> from owlmixin.samples import Human
>>> Human.from_optional_dicts(None).is_none()
True
>>> Human.from_optional_dicts([]).get()
[]
"""
return TOption(
cls.from_iterable_dicts(
ds, force_snake_case=force_snake_case, force_cast=force_cast, restrict=restrict
)
if ds is not None
else None
)
@classmethod
def from_dicts_by_key(
cls,
ds: dict,
*,
force_snake_case: bool = True,
force_cast: bool = False,
restrict: bool = True,
) -> TDict[T]:
"""From dict of dict to dict of instance
:param ds: Dict of dict
:param force_snake_case: Keys are transformed to snake case in order to compliant PEP8 if True
:param force_cast: Cast forcibly if True
:param restrict: Prohibit extra parameters if True
:return: Dict of instance
Usage:
>>> from owlmixin.samples import Human
>>> humans_by_name: TDict[Human] = Human.from_dicts_by_key({
... 'Tom': {"id": 1, "name": "Tom", "favorites": [{"name": "Apple"}]},
... 'John': {"id": 2, "name": "John", "favorites": [{"name": "Orange"}]}
... })
>>> humans_by_name['Tom'].name
'Tom'
>>> humans_by_name['John'].name
'John'
"""
return TDict(
{
k: cls.from_dict(
v, force_snake_case=force_snake_case, force_cast=force_cast, restrict=restrict
)
for k, v in ds.items()
}
)
@classmethod
def from_optional_dicts_by_key(
cls,
ds: Optional[dict],
*,
force_snake_case=True,
force_cast: bool = False,
restrict: bool = True,
) -> TOption[TDict[T]]:
"""From dict of dict to optional dict of instance.
:param ds: Dict of dict
:param force_snake_case: Keys are transformed to snake case in order to compliant PEP8 if True
:param force_cast: Cast forcibly if True
:param restrict: Prohibit extra parameters if True
:return: Dict of instance
Usage:
>>> from owlmixin.samples import Human
>>> Human.from_optional_dicts_by_key(None).is_none()
True
>>> Human.from_optional_dicts_by_key({}).get()
{}
"""
return TOption(
cls.from_dicts_by_key(
ds, force_snake_case=force_snake_case, force_cast=force_cast, restrict=restrict
)
if ds is not None
else None
)
@classmethod
def from_json(
cls, data: str, *, force_snake_case=True, force_cast: bool = False, restrict: bool = False
) -> T:
"""From json string to instance
:param data: Json string
:param force_snake_case: Keys are transformed to snake case in order to compliant PEP8 if True
:param force_cast: Cast forcibly if True
:param restrict: Prohibit extra parameters if True
:return: Instance
Usage:
>>> from owlmixin.samples import Human
>>> human: Human = Human.from_json('''{
... "id": 1,
... "name": "Tom",
... "favorites": [
... {"name": "Apple", "names_by_lang": {"en": "Apple", "de": "Apfel"}},
... {"name": "Orange"}
... ]
... }''')
>>> human.id
1
>>> human.name
'Tom'
>>> human.favorites[0].names_by_lang.get()["de"]
'Apfel'
"""
return cls.from_dict(
util.load_json(data),
force_snake_case=force_snake_case,
force_cast=force_cast,
restrict=restrict,
)
@classmethod
def from_jsonf(
    cls,
    fpath: str,
    encoding: str = "utf8",
    *,
    force_snake_case: bool = True,
    force_cast: bool = False,
    restrict: bool = False,
) -> T:
    """From json file path to instance

    :param fpath: Json file path
    :param encoding: Json file encoding
    :param force_snake_case: Keys are transformed to snake case in order to compliant PEP8 if True
    :param force_cast: Cast forcibly if True
    :param restrict: Prohibit extra parameters if True
    :return: Instance
    """
    return cls.from_dict(
        util.load_jsonf(fpath, encoding),
        force_snake_case=force_snake_case,
        force_cast=force_cast,
        restrict=restrict,
    )
@classmethod
def from_json_to_list(
    cls,
    data: str,
    *,
    force_snake_case: bool = True,
    force_cast: bool = False,
    restrict: bool = False,
) -> TList[T]:
    """From json string to list of instance

    :param data: Json string
    :param force_snake_case: Keys are transformed to snake case in order to compliant PEP8 if True
    :param force_cast: Cast forcibly if True
    :param restrict: Prohibit extra parameters if True
    :return: List of instance

    Usage:

        >>> from owlmixin.samples import Human
        >>> humans: TList[Human] = Human.from_json_to_list('''[
        ...    {"id": 1, "name": "Tom", "favorites": [{"name": "Apple"}]},
        ...    {"id": 2, "name": "John", "favorites": [{"name": "Orange"}]}
        ... ]''')
        >>> humans[0].name
        'Tom'
        >>> humans[1].name
        'John'
    """
    return cls.from_dicts(
        util.load_json(data),
        force_snake_case=force_snake_case,
        force_cast=force_cast,
        restrict=restrict,
    )
@classmethod
def from_json_to_iterator(
    cls,
    data: str,
    *,
    force_snake_case: bool = True,
    force_cast: bool = False,
    restrict: bool = False,
) -> TIterator[T]:
    """From json string to iterable instance

    :param data: Json string
    :param force_snake_case: Keys are transformed to snake case in order to compliant PEP8 if True
    :param force_cast: Cast forcibly if True
    :param restrict: Prohibit extra parameters if True
    :return: Iterable instance

    Usage:

        >>> from owlmixin.samples import Human
        >>> humans: TIterator[Human] = Human.from_json_to_iterator('''[
        ...    {"id": 1, "name": "Tom", "favorites": [{"name": "Apple"}]},
        ...    {"id": 2, "name": "John", "favorites": [{"name": "Orange"}]}
        ... ]''')
        >>> humans.next_at(1).get().name
        'John'
        >>> humans.next_at(0).is_none()
        True
    """
    return cls.from_iterable_dicts(
        util.load_json(data),
        force_snake_case=force_snake_case,
        force_cast=force_cast,
        restrict=restrict,
    )
@classmethod
def from_jsonf_to_list(
    cls,
    fpath: str,
    encoding: str = "utf8",
    *,
    force_snake_case: bool = True,
    force_cast: bool = False,
    restrict: bool = False,
) -> TList[T]:
    """From json file path to list of instance

    :param fpath: Json file path
    :param encoding: Json file encoding
    :param force_snake_case: Keys are transformed to snake case in order to compliant PEP8 if True
    :param force_cast: Cast forcibly if True
    :param restrict: Prohibit extra parameters if True
    :return: List of instance
    """
    return cls.from_dicts(
        util.load_jsonf(fpath, encoding),
        force_snake_case=force_snake_case,
        force_cast=force_cast,
        restrict=restrict,
    )
@classmethod
def from_jsonf_to_iterator(
    cls,
    fpath: str,
    encoding: str = "utf8",
    *,
    force_snake_case: bool = True,
    force_cast: bool = False,
    restrict: bool = False,
) -> TIterator[T]:
    """From json file path to iterable instance

    :param fpath: Json file path
    :param encoding: Json file encoding
    :param force_snake_case: Keys are transformed to snake case in order to compliant PEP8 if True
    :param force_cast: Cast forcibly if True
    :param restrict: Prohibit extra parameters if True
    :return: Iterable instance
    """
    return cls.from_iterable_dicts(
        util.load_jsonf(fpath, encoding),
        force_snake_case=force_snake_case,
        force_cast=force_cast,
        restrict=restrict,
    )
@classmethod
def from_yaml(
    cls,
    data: str,
    *,
    force_snake_case: bool = True,
    force_cast: bool = False,
    restrict: bool = True,
) -> T:
    """From yaml string to instance

    :param data: Yaml string
    :param force_snake_case: Keys are transformed to snake case in order to compliant PEP8 if True
    :param force_cast: Cast forcibly if True
    :param restrict: Prohibit extra parameters if True
    :return: Instance

    Usage:

        >>> from owlmixin.samples import Human
        >>> human: Human = Human.from_yaml('''
        ... id: 1
        ... name: Tom
        ... favorites:
        ...   - name: Apple
        ...     names_by_lang:
        ...       en: Apple
        ...       de: Apfel
        ...   - name: Orange
        ... ''')
        >>> human.id
        1
        >>> human.name
        'Tom'
        >>> human.favorites[0].names_by_lang.get()["de"]
        'Apfel'
    """
    return cls.from_dict(
        util.load_yaml(data),
        force_snake_case=force_snake_case,
        force_cast=force_cast,
        restrict=restrict,
    )
@classmethod
def from_yamlf(
    cls,
    fpath: str,
    encoding: str = "utf8",
    *,
    force_snake_case: bool = True,
    force_cast: bool = False,
    restrict: bool = True,
) -> T:
    """From yaml file path to instance

    :param fpath: Yaml file path
    :param encoding: Yaml file encoding
    :param force_snake_case: Keys are transformed to snake case in order to compliant PEP8 if True
    :param force_cast: Cast forcibly if True
    :param restrict: Prohibit extra parameters if True
    :return: Instance
    """
    return cls.from_dict(
        util.load_yamlf(fpath, encoding),
        force_snake_case=force_snake_case,
        force_cast=force_cast,
        restrict=restrict,
    )
@classmethod
def from_yaml_to_list(
    cls,
    data: str,
    *,
    force_snake_case: bool = True,
    force_cast: bool = False,
    restrict: bool = True,
) -> TList[T]:
    """From yaml string to list of instance

    :param data: Yaml string
    :param force_snake_case: Keys are transformed to snake case in order to compliant PEP8 if True
    :param force_cast: Cast forcibly if True
    :param restrict: Prohibit extra parameters if True
    :return: List of instance

    Usage:

        >>> from owlmixin.samples import Human
        >>> humans: TList[Human] = Human.from_yaml_to_list('''
        ... - id: 1
        ...   name: Tom
        ...   favorites:
        ...     - name: Apple
        ... - id: 2
        ...   name: John
        ...   favorites:
        ...     - name: Orange
        ... ''')
        >>> humans[0].name
        'Tom'
        >>> humans[1].name
        'John'
        >>> humans[0].favorites[0].name
        'Apple'
    """
    return cls.from_dicts(
        util.load_yaml(data),
        force_snake_case=force_snake_case,
        force_cast=force_cast,
        restrict=restrict,
    )
@classmethod
def from_yaml_to_iterator(
    cls,
    data: str,
    *,
    force_snake_case: bool = True,
    force_cast: bool = False,
    restrict: bool = True,
) -> TIterator[T]:
    """From yaml string to iterable instance

    :param data: Yaml string
    :param force_snake_case: Keys are transformed to snake case in order to compliant PEP8 if True
    :param force_cast: Cast forcibly if True
    :param restrict: Prohibit extra parameters if True
    :return: Iterable instance

    Usage:

        >>> from owlmixin.samples import Human
        >>> humans: TIterator[Human] = Human.from_yaml_to_iterator('''
        ... - id: 1
        ...   name: Tom
        ...   favorites:
        ...     - name: Apple
        ... - id: 2
        ...   name: John
        ...   favorites:
        ...     - name: Orange
        ... ''')
        >>> human1 = humans.next_at(1).get()
        >>> human1.name
        'John'
        >>> humans.next_at(0).is_none()
        True
        >>> human1.favorites[0].name
        'Orange'
    """
    return cls.from_iterable_dicts(
        util.load_yaml(data),
        force_snake_case=force_snake_case,
        force_cast=force_cast,
        restrict=restrict,
    )
@classmethod
def from_yamlf_to_list(
    cls,
    fpath: str,
    encoding: str = "utf8",
    *,
    force_snake_case: bool = True,
    force_cast: bool = False,
    restrict: bool = True,
) -> TList[T]:
    """From yaml file path to list of instance

    :param fpath: Yaml file path
    :param encoding: Yaml file encoding
    :param force_snake_case: Keys are transformed to snake case in order to compliant PEP8 if True
    :param force_cast: Cast forcibly if True
    :param restrict: Prohibit extra parameters if True
    :return: List of instance
    """
    return cls.from_dicts(
        util.load_yamlf(fpath, encoding),
        force_snake_case=force_snake_case,
        force_cast=force_cast,
        restrict=restrict,
    )
@classmethod
def from_yamlf_to_iterator(
    cls,
    fpath: str,
    encoding: str = "utf8",
    *,
    force_snake_case: bool = True,
    force_cast: bool = False,
    restrict: bool = True,
) -> TIterator[T]:
    """From yaml file path to iterable instance

    :param fpath: Yaml file path
    :param encoding: Yaml file encoding
    :param force_snake_case: Keys are transformed to snake case in order to compliant PEP8 if True
    :param force_cast: Cast forcibly if True
    :param restrict: Prohibit extra parameters if True
    :return: Iterable instance
    """
    return cls.from_iterable_dicts(
        util.load_yamlf(fpath, encoding),
        force_snake_case=force_snake_case,
        force_cast=force_cast,
        restrict=restrict,
    )
@classmethod
def from_csvf_to_list(
    cls,
    fpath: str,
    fieldnames: Optional[Sequence[str]] = None,
    *,
    encoding: str = "utf8",
    force_snake_case: bool = True,
    restrict: bool = True,
) -> TList[T]:
    """From csv file path to list of instance

    Values read from csv are always strings, so casting is forced.

    :param fpath: Csv file path
    :param fieldnames: Specify csv header names if not included in the file
    :param encoding: Csv file encoding
    :param force_snake_case: Keys are transformed to snake case in order to compliant PEP8 if True
    :param restrict: Prohibit extra parameters if True
    :return: List of Instance
    """
    rows = list(util.load_csvf(fpath, fieldnames, encoding))
    return cls.from_dicts(
        rows,
        force_snake_case=force_snake_case,
        force_cast=True,
        restrict=restrict,
    )
@classmethod
def from_csvf_to_iterator(
    cls,
    fpath: str,
    fieldnames: Optional[Sequence[str]] = None,
    *,
    encoding: str = "utf8",
    force_snake_case: bool = True,
    restrict: bool = True,
) -> TIterator[T]:
    """From csv file path to iterable instance

    Values read from csv are always strings, so casting is forced.

    :param fpath: Csv file path
    :param fieldnames: Specify csv header names if not included in the file
    :param encoding: Csv file encoding
    :param force_snake_case: Keys are transformed to snake case in order to compliant PEP8 if True
    :param restrict: Prohibit extra parameters if True
    :return: Iterable Instance
    """
    rows = util.load_csvf(fpath, fieldnames, encoding)
    return cls.from_iterable_dicts(
        rows,
        force_snake_case=force_snake_case,
        force_cast=True,
        restrict=restrict,
    )
@classmethod
def from_json_url(
    cls,
    url: str,
    *,
    force_snake_case: bool = True,
    force_cast: bool = False,
    restrict: bool = False,
) -> T:
    """From url which returns json to instance

    :param url: Url which returns json
    :param force_snake_case: Keys are transformed to snake case in order to compliant PEP8 if True
    :param force_cast: Cast forcibly if True
    :param restrict: Prohibit extra parameters if True
    :return: Instance
    """
    return cls.from_dict(
        util.load_json_url(url),
        force_snake_case=force_snake_case,
        force_cast=force_cast,
        restrict=restrict,
    )
|
"""Chapter 22 Practice Questions
Answers Chapter 22 Practice Questions via Python code.
"""
from pythontutorials.books.CrackingCodes.Ch18.vigenereCipher import decryptMessage
def main():
    """Answer the Chapter 22 practice questions by decrypting each ciphertext.

    Each question keeps its own variable: the original code reused ``message``,
    silently discarding the earlier ciphertexts before they could be used.
    Fill in the blanks and uncomment the prints to reveal the answers.
    """
    # 1. How many prime numbers are there?
    # Hint: Check page 322
    message1 = "Iymdi ah rv urxxeqfi fjdjqv gu gzuqw clunijh."  # Encrypted with key "PRIMES"
    #print(decryptMessage(blank, blank)) # Fill in the blanks

    # 2. What are integers that are not prime called?
    # Hint: Check page 323
    message2 = "Vbmggpcw wlvx njr bhv pctqh emi psyzxf czxtrwdxr fhaugrd."  # Encrypted with key "NOTCALLEDEVENS"
    #print(decryptMessage(blank, blank)) # Fill in the blanks

    # 3. What are two algorithms for finding prime numbers?
    # Hint: Check page 323
    # Encrypted with key "ALGORITHMS"
    message3 = "Tsk hyzxl mdgzxwkpfz gkeo ob kpbz ngov gfv: bkpmd dtbwjqhu, eaegk cw Mkhfgsenseml, hzv Rlhwe-Ubsxwr."
    #print(decryptMessage(blank, blank)) # Fill in the blanks
# Run the exercise only when executed as a script, not when imported.
if __name__ == '__main__':
    main()
|
from __future__ import absolute_import, print_function, division
from future.utils import with_metaclass
import numpy as np
import scipy as sp
from abc import ABCMeta, abstractmethod
from scipy import integrate
import scipy.interpolate as interpolate
from . import core
from . import refstate
__all__ = ['GammaEos','GammaCalc']
def set_calculator(eos_mod, kind, kind_opts):
    """Attach a gamma calculator of the requested kind to an EOS model.

    Parameters
    ----------
    eos_mod : Eos
        EOS model instance that will own the calculator.
    kind : str
        Calculator kind name; must be a member of ``kind_opts``.
    kind_opts : list of str
        Valid calculator kind names.
    """
    assert kind in kind_opts, (
        kind + ' is not a valid thermal calculator. '+
        'You must select one of: ' + str(kind_opts))

    eos_mod._kind = kind

    if kind=='GammaPowLaw':
        calc = _GammaPowLaw(eos_mod)
    elif kind=='GammaShiftPowLaw':
        calc = _GammaShiftPowLaw(eos_mod)
    elif kind=='GammaFiniteStrain':
        calc = _GammaFiniteStrain(eos_mod)
    else:
        # Unreachable while the assert above covers kind_opts; kept as a
        # guard in case kind_opts grows without a matching branch here.
        raise NotImplementedError(kind+' is not a valid '+
                                  'GammaEos Calculator.')

    eos_mod._add_calculator(calc, calc_type='gamma')
class GammaEos(with_metaclass(ABCMeta, core.Eos)):
    """
    EOS model for compression dependence of Grüneisen parameter.

    Parameters
    ----------
    Thermodyn properties depend only on volume
    """
    _kind_opts = ['GammaPowLaw','GammaShiftPowLaw','GammaFiniteStrain']

    def __init__(self, kind='GammaPowLaw', natom=1, model_state=None):
        """Set up the gamma calculator and reference state.

        Parameters
        ----------
        kind : str
            Gamma calculator kind; one of ``_kind_opts``.
        natom : int
            Number of atoms per formula unit.
        model_state : dict, optional
            Saved model state to restore. Note: this previously defaulted to
            a mutable ``{}`` shared across all instances; use None instead
            and create a fresh dict per instance.
        """
        if model_state is None:
            model_state = {}

        self._pre_init(natom=natom)

        set_calculator(self, kind, self._kind_opts)

        ref_compress_state='P0'
        ref_thermal_state='T0'
        ref_energy_type = 'E0'
        refstate.set_calculator(self, ref_compress_state=ref_compress_state,
                                ref_thermal_state=ref_thermal_state,
                                ref_energy_type=ref_energy_type)
        # self._set_ref_state()
        self._post_init(model_state=model_state)

    def __repr__(self):
        calc = self.calculators['gamma']
        return ("GammaEos(kind={kind}, natom={natom}, "
                "model_state={model_state}, "
                ")"
                .format(kind=repr(calc.name),
                        natom=repr(self.natom),
                        model_state=self.model_state
                        )
                )

    def _set_ref_state(self):
        """Define reference-state parameter metadata for the adiabatic path."""
        calc = self.calculators['gamma']
        path_const = calc.path_const

        if path_const=='S':
            param_ref_names = []
            param_ref_units = []
            param_ref_defaults = []
            param_ref_scales = []
        else:
            raise NotImplementedError(
                'path_const '+path_const+' is not valid for ThermalEos.')

        self._path_const = calc.path_const
        self._param_ref_names = param_ref_names
        self._param_ref_units = param_ref_units
        self._param_ref_defaults = param_ref_defaults
        self._param_ref_scales = param_ref_scales

    def gamma(self, V_a):
        """Return Grüneisen parameter at volumes V_a."""
        return self.calculators['gamma']._calc_gamma(V_a)

    def gamma_deriv(self, V_a):
        """Return volume derivative of the Grüneisen parameter at V_a."""
        return self.calculators['gamma']._calc_gamma_deriv(V_a)

    def temp(self, V_a, T0=None):
        """Return adiabatic temperature at volumes V_a, referenced to T0."""
        return self.calculators['gamma']._calc_temp(V_a, T0=T0)
class GammaCalc(with_metaclass(ABCMeta, core.Calculator)):
    """
    Abstract Equation of State class for a reference Compression Path

    Path can either be isothermal (T=const) or adiabatic (S=const)

    For this restricted path, thermodyn properties depend only on volume
    """
    def __init__(self, eos_mod):
        # Store the owning EOS model and declare this an adiabatic ('S') path.
        self._eos_mod = eos_mod
        self._init_params()

        self._path_const = 'S'
        pass

    @property
    def path_const( self ):
        # Thermodynamic path held constant along this calculator ('S').
        return self._path_const

    ####################
    # Required Methods #
    ####################
    @abstractmethod
    def _init_params( self ):
        """Initialize list of calculator parameter names."""
        pass

    @abstractmethod
    def _calc_gamma(self, V_a):
        """Return the Grüneisen parameter evaluated at volumes V_a."""
        pass

    @abstractmethod
    def _calc_gamma_deriv(self, V_a):
        """Return the volume derivative of gamma evaluated at volumes V_a."""
        pass

    @abstractmethod
    def _calc_temp(self, V_a, T0=None):
        """Return the adiabatic temperature path at volumes V_a (from T0)."""
        pass

    def _calc_theta(self, V_a):
        # Debye temperature along the adiabat, using theta0 as the reference.
        # NOTE(review): reads self.eos_mod (no underscore) while __init__
        # stores self._eos_mod -- presumably core.Calculator exposes an
        # eos_mod property; confirm.
        theta0 = self.eos_mod.get_param_values(param_names=['theta0'])
        theta = self._calc_temp(V_a, T0=theta0)
        return theta

    ####################
    # Optional Methods #
    ####################
    # EOS property functions
    def _calc_param_deriv(self, fname, paramname, V_a, dxfrac=1e-6):
        # Numerical derivative of property `fname` with respect to parameter
        # `paramname`, using a step of dxfrac times the parameter scale.
        # Mutates the shared parameter store, evaluates, then restores it.
        scale_a, paramkey_a = self.get_param_scale(apply_expand_adj=True )
        scale = scale_a[paramkey_a==paramname][0]
        # print 'scale: ' + np.str(scale)

        #if (paramname is 'E0') and (fname is 'energy'):
        #    return np.ones(V_a.shape)

        try:
            fun = getattr(self, fname)
            # Note that self is implicitly included
            val0_a = fun(V_a)
        except:
            # NOTE(review): bare except + assert False hides the original
            # error and is stripped under -O; a raised ValueError would be
            # safer -- left as-is to preserve behavior.
            assert False, 'That is not a valid function name ' + \
                '(e.g. it should be press or energy)'

        try:
            param = core.get_params([paramname])[0]
            dparam = scale*dxfrac
            # print 'param: ' + np.str(param)
            # print 'dparam: ' + np.str(dparam)
        except:
            assert False, 'This is not a valid parameter name'

        # set param value in eos_d dict
        core.set_params([paramname,], [param+dparam,])

        # Note that self is implicitly included
        dval_a = fun(V_a) - val0_a

        # reset param to original value
        core.set_params([paramname], [param])

        # Finite difference normalized by the fractional step.
        deriv_a = dval_a/dxfrac
        return deriv_a

    def _calc_energy_perturb(self, V_a):
        """Returns Energy pertubation basis functions resulting from fractional changes to EOS params."""

        fname = 'energy'
        scale_a, paramkey_a = self.get_param_scale(
            apply_expand_adj=self.expand_adj)
        Eperturb_a = []
        for paramname in paramkey_a:
            # One basis function per parameter: dE/d(ln param).
            iEperturb_a = self._calc_param_deriv(fname, paramname, V_a)
            Eperturb_a.append(iEperturb_a)

        Eperturb_a = np.array(Eperturb_a)

        return Eperturb_a, scale_a, paramkey_a
class _GammaPowLaw(GammaCalc):
    """Power-law Grüneisen parameter: gamma = gamma0*(V/V0)**q."""

    _path_opts=['S']

    def __init__(self, eos_mod):
        super(_GammaPowLaw, self).__init__(eos_mod)

    def _init_params(self):
        """Initialize list of calculator parameter names."""
        V0 = 100
        gamma0 = 1.0
        q = 1.0

        self._param_names = ['V0', 'gamma0', 'q']
        self._param_units = ['ang^3', '1', '1']
        self._param_defaults = [V0, gamma0, q]
        self._param_scales = [V0, gamma0, q]

    def _calc_gamma(self, V_a):
        V0, gamma0, q = self.eos_mod.get_param_values(
            param_names=['V0','gamma0','q'])

        gamma_a = gamma0 *(V_a/V0)**q

        return gamma_a

    def _calc_gamma_deriv(self, V_a):
        q, = self.eos_mod.get_param_values(param_names=['q'])

        gamma_a = self._calc_gamma(V_a)
        # d(gamma)/dV = q*gamma/V for the power law.
        gamma_deriv_a = q*gamma_a/V_a
        return gamma_deriv_a

    def _calc_temp(self, V_a, T0=None):
        if T0 is None:
            T0 = self.eos_mod.refstate.ref_temp()
        gamma0, q = self.eos_mod.get_param_values(
            param_names=['gamma0','q'])
        gamma_a = self._calc_gamma(V_a)
        # Analytic integral of dlnT = -gamma dlnV for the power-law gamma.
        T_a = T0*np.exp(-(gamma_a - gamma0)/q)

        return T_a
class _GammaShiftPowLaw(GammaCalc):
    """
    Shifted Power Law description of Grüneisen Parameter (Al’tshuler, 1987)

    gamma = gamma_inf + (gamma0-gamma_inf)*(V/V0)**beta
    """
    _path_opts=['S']

    def __init__(self, eos_mod):
        super(_GammaShiftPowLaw, self).__init__(eos_mod)

    def _init_params(self):
        """Initialize list of calculator parameter names."""
        V0 = 100
        gamma0 = 1.5
        gamma_inf = 2/3
        beta = 1.4
        T0 = 300

        self._param_names = ['V0', 'gamma0', 'gamma_inf', 'beta', 'T0']
        self._param_units = ['ang^3', '1', '1', '1', 'K']
        self._param_defaults = [V0, gamma0, gamma_inf, beta, T0]
        self._param_scales = [V0, gamma0, gamma_inf, beta, T0]

    def _calc_gamma(self, V_a):
        V0, gamma0, gamma_inf, beta = self.eos_mod.get_param_values(
            param_names=['V0','gamma0','gamma_inf','beta'])
        gamma_a = gamma_inf + (gamma0-gamma_inf)*(V_a/V0)**beta
        return gamma_a

    def _calc_gamma_deriv(self, V_a):
        gamma_inf, beta = self.eos_mod.get_param_values(
            param_names=['gamma_inf','beta'])
        gamma_a = self._calc_gamma(V_a)
        # Only the shifted part varies with volume.
        gamma_deriv_a = beta/V_a*(gamma_a-gamma_inf)
        return gamma_deriv_a

    def _calc_temp(self, V_a, T0=None):
        T0, = self.eos_mod.get_param_values(param_names=['T0'], overrides=[T0])
        V0, gamma0, gamma_inf, beta = self.eos_mod.get_param_values(
            param_names=['V0','gamma0','gamma_inf','beta'])
        gamma_a = self._calc_gamma(V_a)
        x = V_a/V0
        # Analytic adiabat for the shifted power-law gamma.
        T_a = T0*x**(-gamma_inf)*np.exp((gamma0-gamma_inf)/beta*(1-x**beta))
        return T_a
class _GammaFiniteStrain(GammaCalc):
    """Finite-strain (Eulerian) expansion of the Grüneisen parameter."""

    _path_opts=['S']

    def __init__(self, eos_mod):
        super(_GammaFiniteStrain, self).__init__(eos_mod)

    def _init_params(self):
        """Initialize list of calculator parameter names."""
        V0 = 100
        gamma0 = 0.5
        gammap0 = -2

        self._param_names = ['V0', 'gamma0', 'gammap0']
        self._param_units = ['ang^3', '1', '1']
        self._param_defaults = [V0, gamma0, gammap0]
        self._param_scales = [V0, gamma0, gammap0]

    def _calc_strain_coefs(self):
        """Return the (a1, a2) expansion coefficients from gamma0, gammap0."""
        V0, gamma0, gammap0 = self.eos_mod.get_param_values(
            param_names=['V0','gamma0','gammap0'])
        a1 = 6*gamma0
        a2 = -12*gamma0 +36*gamma0**2 -18*gammap0
        return a1, a2

    def _calc_fstrain(self, V_a, deriv=False):
        """Return Eulerian finite strain f(V), or df/dV when deriv=True."""
        V0, = self.eos_mod.get_param_values(param_names=['V0'])
        x = V_a/V0
        if deriv:
            return -1/(3*V0)*x**(-5/3)
        return 1/2*(x**(-2/3)-1)

    def _calc_gamma(self, V_a):
        a1, a2 = self._calc_strain_coefs()
        fstr_a = self._calc_fstrain(V_a)
        gamma_a = (2*fstr_a+1)*(a1+a2*fstr_a)/(6*(1+a1*fstr_a+0.5*a2*fstr_a**2))
        return gamma_a

    def _calc_gamma_deriv(self, V_a):
        a1, a2 = self._calc_strain_coefs()
        fstr_a = self._calc_fstrain(V_a)
        fstr_deriv = self._calc_fstrain(V_a, deriv=True)
        gamma_a = self._calc_gamma(V_a)
        # Chain rule: d(gamma)/dV = gamma * (dlngamma/df) * df/dV.
        gamma_deriv_a = gamma_a*fstr_deriv*(
            2/(2*fstr_a+1)+a2/(a1+a2*fstr_a)
            -(a1+a2*fstr_a)/(1+a1*fstr_a+.5*a2*fstr_a**2))
        return gamma_deriv_a

    def _calc_temp(self, V_a, T0=None):
        if T0 is None:
            T0 = self.eos_mod.refstate.ref_temp()
        a1, a2 = self._calc_strain_coefs()
        fstr_a = self._calc_fstrain(V_a)
        T_a = T0*np.sqrt(1 + a1*fstr_a + 0.5*a2*fstr_a**2)
        return T_a
|
import numpy
import pytest
import theano
class TestInputLayer:
    """Unit tests for lasagne.layers.input.InputLayer."""

    @pytest.fixture
    def layer(self):
        from lasagne.layers.input import InputLayer
        return InputLayer((3, 2))

    def test_input_var(self, layer):
        assert layer.input_var.ndim == 2

    def test_get_output_shape(self, layer):
        assert layer.get_output_shape() == (3, 2)

    def test_get_output_without_arguments(self, layer):
        assert layer.get_output() is layer.input_var

    def test_get_output_input_is_variable(self, layer):
        variable = theano.Variable("myvariable")
        assert layer.get_output(variable) is variable

    def test_get_output_input_is_array(self, layer):
        # `input_data` renamed from `input`, which shadowed the builtin.
        input_data = [[1, 2, 3]]
        output = layer.get_output(input_data)
        assert numpy.all(output.eval() == input_data)

    def test_get_output_input_is_a_mapping(self, layer):
        # `input_map` renamed from `input`, which shadowed the builtin.
        input_map = {layer: theano.tensor.matrix()}
        assert layer.get_output(input_map) is input_map[layer]

    def test_input_var_name(self, layer):
        assert layer.input_var.name == "input"

    def test_named_layer_input_var_name(self):
        from lasagne.layers.input import InputLayer
        layer = InputLayer((3, 2), name="foo")
        assert layer.input_var.name == "foo.input"
|
'''
Created on Nov 19, 2011
@author: scottporter
'''
class BaseSingleton(object):
    """Base class providing lazily-created, per-class singleton access."""

    _instance = None

    @classmethod
    def get_instance(cls):
        """Return the single shared instance, creating it on first use."""
        instance = cls._instance
        if instance is None:
            instance = cls()
            cls._instance = instance
        return instance
|
"""Device tracker for Synology SRM routers."""
from __future__ import annotations
import logging
import synology_srm
import voluptuous as vol
from homeassistant.components.device_tracker import (
DOMAIN,
PLATFORM_SCHEMA as DEVICE_TRACKER_PLATFORM_SCHEMA,
DeviceScanner,
)
from homeassistant.const import (
CONF_HOST,
CONF_PASSWORD,
CONF_PORT,
CONF_SSL,
CONF_USERNAME,
CONF_VERIFY_SSL,
)
from homeassistant.core import HomeAssistant
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.typing import ConfigType
_LOGGER = logging.getLogger(__name__)
DEFAULT_USERNAME = "admin"
DEFAULT_PORT = 8001
DEFAULT_SSL = True
DEFAULT_VERIFY_SSL = False
PLATFORM_SCHEMA = DEVICE_TRACKER_PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_USERNAME, default=DEFAULT_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_SSL, default=DEFAULT_SSL): cv.boolean,
vol.Optional(CONF_VERIFY_SSL, default=DEFAULT_VERIFY_SSL): cv.boolean,
}
)
ATTRIBUTE_ALIAS = {
"band": None,
"connection": None,
"current_rate": None,
"dev_type": None,
"hostname": None,
"ip6_addr": None,
"ip_addr": None,
"is_baned": "is_banned",
"is_beamforming_on": None,
"is_guest": None,
"is_high_qos": None,
"is_low_qos": None,
"is_manual_dev_type": None,
"is_manual_hostname": None,
"is_online": None,
"is_parental_controled": "is_parental_controlled",
"is_qos": None,
"is_wireless": None,
"mac": None,
"max_rate": None,
"mesh_node_id": None,
"rate_quality": None,
"signalstrength": "signal_strength",
"transferRXRate": "transfer_rx_rate",
"transferTXRate": "transfer_tx_rate",
}
def get_scanner(hass: HomeAssistant, config: ConfigType) -> DeviceScanner | None:
    """Validate the configuration and return Synology SRM scanner."""
    scanner = SynologySrmDeviceScanner(config[DOMAIN])
    if not scanner.success_init:
        return None
    return scanner
class SynologySrmDeviceScanner(DeviceScanner):
    """This class scans for devices connected to a Synology SRM router."""

    def __init__(self, config):
        """Initialize the scanner."""
        self.client = synology_srm.Client(
            host=config[CONF_HOST],
            port=config[CONF_PORT],
            username=config[CONF_USERNAME],
            password=config[CONF_PASSWORD],
            https=config[CONF_SSL],
        )

        if not config[CONF_VERIFY_SSL]:
            self.client.http.disable_https_verify()

        self.devices = []
        self.success_init = self._update_info()
        _LOGGER.info("Synology SRM scanner initialized")

    def scan_devices(self):
        """Scan for new devices and return a list with found device IDs."""
        self._update_info()
        return [entry["mac"] for entry in self.devices]

    def get_extra_attributes(self, device) -> dict:
        """Get the extra attributes of a device."""
        filtered_attributes: dict[str, str] = {}

        match = next(
            (entry for entry in self.devices if entry["mac"] == device), None
        )
        if not match:
            return filtered_attributes

        for raw_name, alias in ATTRIBUTE_ALIAS.items():
            value = match.get(raw_name)
            if value is None:
                continue
            # Prefer the corrected alias where the router API name is wrong.
            filtered_attributes[alias or raw_name] = value

        return filtered_attributes

    def get_device_name(self, device):
        """Return the name of the given device or None if we don't know."""
        for entry in self.devices:
            if entry["mac"] == device:
                return entry["hostname"]
        return None

    def _update_info(self):
        """Check the router for connected devices."""
        _LOGGER.debug("Scanning for connected devices")

        try:
            self.devices = self.client.core.get_network_nsm_device({"is_online": True})
        except synology_srm.http.SynologyException as ex:
            _LOGGER.error("Error with the Synology SRM: %s", ex)
            return False

        _LOGGER.debug("Found %d device(s) connected to the router", len(self.devices))
        return True
|
'''
utils.py: helper functions for DLP api
Copyright (c) 2017 Vanessa Sochat
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''
from som.logger import bot
import os
import sys
def paginate_items(items, size=100):
    '''Split ``items`` into consecutive chunks of at most ``size`` elements.

    :param items: sequence to paginate
    :param size: maximum chunk length (default 100)
    :returns: list of lists, preserving order
    '''
    return [items[start:start + size] for start in range(0, len(items), size)]
def clean_text(text, findings):
    '''Redact PHI findings from a text string.

    Each quoted finding is replaced with its info-type name wrapped in ``**``.

    :param text: the original text sent to the content.inspect DLP endpoint
    :param findings: the full response for the text.
    '''
    for finding in findings.get('findings', []):
        label = "**%s**" % finding['infoType']['name']
        # Plain substring replacement; revisit if quotes can overlap.
        text = text.replace(finding['quote'], label)
    return text
|
'''
Description:
Extract the feature from the text in English.
Version:
python3
'''
from sklearn.feature_extraction.text import CountVectorizer

# min_df=1 keeps every term that appears in at least one document.
VECTORIZER = CountVectorizer(min_df=1)

CORPUS = [
    'This is the first document.',
    'This is the second second document.',
    'And the third one.',
    'Is this the first document?'
]

# Learn the vocabulary and build the document-term count matrix.
X = VECTORIZER.fit_transform(CORPUS)

# get_feature_names() was deprecated in scikit-learn 1.0 and removed in 1.2;
# get_feature_names_out() is the supported replacement.
FEATURE_NAMES = VECTORIZER.get_feature_names_out()
print(FEATURE_NAMES)
|
import _plotly_utils.basevalidators
class YValidator(_plotly_utils.basevalidators.DataArrayValidator):
    # Auto-generated validator for the `y` data array of `bar` traces;
    # defaults follow the plotly schema and may be overridden via kwargs.
    def __init__(self, plotly_name="y", parent_name="bar", **kwargs):
        super(YValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            anim=kwargs.pop("anim", True),
            edit_type=kwargs.pop("edit_type", "calc+clearAxisTypes"),
            role=kwargs.pop("role", "data"),
            **kwargs
        )
|
from .record import (
Metadata,
Record,
)
__all__ = ['Parser']
class Parser:
    """Parses RIR delegation-format files line by line into Record objects."""

    def __init__(self, store):
        # `store` must expose persist(record); records are handed off as parsed.
        self.store = store

    def parse_record(self, metadata, line):
        """Parse one data line into a Record, or None for non-IP lines.

        :param metadata: Metadata of the file currently being parsed.
        :param line: one '|'-separated delegation line (newline stripped).
        """
        factors = line.split('|')
        if len(factors) < 7:
            return None
        # Field name fixed from the original typo `dete` (delegation date).
        registry, cc, type_, start, value, date, status = factors[:7]
        if type_ not in ('ipv4', 'ipv6'):
            return None
        return Record(metadata, start, type_, value, cc)

    def do(self, fp):
        """Read a delegation file object, persisting every IP record."""
        metadata = None
        for line in fp:
            # Drop the trailing newline. NOTE(review): assumes every line is
            # newline-terminated; a final unterminated line would lose a char.
            line = line[:-1]
            if line.startswith('#') or line.endswith('summary'):
                continue
            if metadata is None:
                # First non-comment, non-summary line is the version header.
                version, registry, serial, records,\
                    startdate, enddate, utcoffset = line.split('|')[:7]
                metadata = Metadata(registry, version, serial)
                continue
            record = self.parse_record(metadata, line)
            if record is None:
                continue
            self.store.persist(record)
|
class Solution(object):
    def isPalindrome(self, s):
        """
        :type s: str
        :rtype: bool
        """
        # Keep only alphanumeric characters, case-folded to lower, then
        # compare the cleaned sequence against its reverse. An empty or
        # punctuation-only string is trivially a palindrome.
        cleaned = [ch for ch in s.lower() if ch.isalnum()]
        return cleaned == cleaned[::-1]
|
"""
ORCID Member
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: Latest
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from orcid_api_v3.models.subtitle_v30_rc2 import SubtitleV30Rc2 # noqa: F401,E501
from orcid_api_v3.models.title_v30_rc2 import TitleV30Rc2 # noqa: F401,E501
from orcid_api_v3.models.translated_title_v30_rc2 import TranslatedTitleV30Rc2 # noqa: F401,E501
class WorkTitleV30Rc2(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'title': 'TitleV30Rc2',
'subtitle': 'SubtitleV30Rc2',
'translated_title': 'TranslatedTitleV30Rc2'
}
attribute_map = {
'title': 'title',
'subtitle': 'subtitle',
'translated_title': 'translated-title'
}
def __init__(self, title=None, subtitle=None, translated_title=None): # noqa: E501
"""WorkTitleV30Rc2 - a model defined in Swagger""" # noqa: E501
self._title = None
self._subtitle = None
self._translated_title = None
self.discriminator = None
if title is not None:
self.title = title
if subtitle is not None:
self.subtitle = subtitle
if translated_title is not None:
self.translated_title = translated_title
@property
def title(self):
"""Gets the title of this WorkTitleV30Rc2. # noqa: E501
:return: The title of this WorkTitleV30Rc2. # noqa: E501
:rtype: TitleV30Rc2
"""
return self._title
@title.setter
def title(self, title):
"""Sets the title of this WorkTitleV30Rc2.
:param title: The title of this WorkTitleV30Rc2. # noqa: E501
:type: TitleV30Rc2
"""
self._title = title
@property
def subtitle(self):
"""Gets the subtitle of this WorkTitleV30Rc2. # noqa: E501
:return: The subtitle of this WorkTitleV30Rc2. # noqa: E501
:rtype: SubtitleV30Rc2
"""
return self._subtitle
@subtitle.setter
def subtitle(self, subtitle):
"""Sets the subtitle of this WorkTitleV30Rc2.
:param subtitle: The subtitle of this WorkTitleV30Rc2. # noqa: E501
:type: SubtitleV30Rc2
"""
self._subtitle = subtitle
@property
def translated_title(self):
"""Gets the translated_title of this WorkTitleV30Rc2. # noqa: E501
:return: The translated_title of this WorkTitleV30Rc2. # noqa: E501
:rtype: TranslatedTitleV30Rc2
"""
return self._translated_title
@translated_title.setter
def translated_title(self, translated_title):
"""Sets the translated_title of this WorkTitleV30Rc2.
:param translated_title: The translated_title of this WorkTitleV30Rc2. # noqa: E501
:type: TranslatedTitleV30Rc2
"""
self._translated_title = translated_title
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(WorkTitleV30Rc2, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
    """Debug representation used by `print` and `pprint` — delegates to to_str()."""
    return self.to_str()
def __eq__(self, other):
    """Two WorkTitleV30Rc2 objects are equal when all their attributes match."""
    if isinstance(other, WorkTitleV30Rc2):
        return self.__dict__ == other.__dict__
    return False
def __ne__(self, other):
    """Inverse of __eq__ (kept explicit for Python 2 compatibility)."""
    equal = self == other
    return not equal
|
import collections
import json
import unittest
import responses
from requests import HTTPError
from mock import patch
from batfish import Client
from batfish.__about__ import __version__
class TestClientAuthorize(unittest.TestCase):
    """Tests for Client.authorize covering 5xx, 401 and successful responses."""

    def setUp(self):
        # Build a client without reading any token from the on-disk config.
        with patch('batfish.client.read_token_from_conf',
                   return_value=None):
            self.cli = Client()

    @responses.activate
    def test_authorize_error(self):
        """A 5xx from the API surfaces as requests.HTTPError."""
        url = "https://api.digitalocean.com/v2/actions"
        responses.add(responses.GET, url,
                      body='{"error": "something"}', status=500,
                      content_type="application/json")
        with self.assertRaises(HTTPError):
            self.cli.authorize("test_token")

    @responses.activate
    def test_authorize_unauthorized(self):
        """A 401 is tolerated (no exception raised) and the call is recorded."""
        url = "https://api.digitalocean.com/v2/kura"
        body = {'id': "unauthorized", 'message': "Unable to authenticate you."}
        responses.add(responses.GET, url, body=json.dumps(body), status=401,
                      content_type="application/json")
        self.cli.authorize("test_token")
        # assertEquals is a deprecated alias; use assertEqual.
        self.assertEqual(responses.calls[0].response.status_code, 401)

    @responses.activate
    def test_authorize_success(self):
        """authorize() returns "OK" on a 200 response.

        BUG FIX: this method was previously also named
        test_authorize_unauthorized, which silently shadowed the 401 test
        above so it never ran.
        """
        url = "https://api.digitalocean.com/v2/actions"
        responses.add(responses.GET, url,
                      body='{"error": "something"}', status=200,
                      content_type="application/json")
        auth = self.cli.authorize("test_token")
        self.assertEqual(auth, "OK")
        self.assertEqual(responses.calls[0].response.status_code, 200)
|
from .. import console, fields
from ..exceptions import ConsoleError
from . import mock
import pytest
# Monkey-patch console's raw_input with the mock so prompts are non-interactive.
console.raw_input=mock.raw_input
def test_prompt():
    """prompt() should echo the canned value ("999") supplied by mock.raw_input."""
    text_field = fields.Field("test_field", "test field", fields.Field.TYPE_TEXT_ONELINE, "this is a test field")
    response = console.prompt(text_field)
    assert response == "999"
def test_input_parser():
    """input_parser() maps CLI args to [model, filename, extension]; empty registries raise ConsoleError."""
    argv = ['-f', 'myfile']
    extensions = ["test"]
    model_names = ["test_model"]
    parsed = console.input_parser(model_names, extensions, argv)
    assert parsed == ["test_model", "myfile", "test"]
    with pytest.raises(ConsoleError):
        console.input_parser("", "", argv)
|
# Launcher: initialize PETSc from the command line, then run the simulation.
import sys
import petsc4py
# NOTE(review): petsc4py.init must run before ecoli_in_pipe is imported —
# presumably that package creates PETSc objects at import time; confirm.
petsc4py.init(sys.argv)
from ecoli_in_pipe import head_tail
head_tail.main_fun()
|
# NOTE(review): the bare `raise SystemExit` below aborts execution immediately,
# so everything after it in this file is deliberately dead scratch/notes code
# (Python 2 syntax throughout: print statements, implicit str/bytes mixing).
raise SystemExit
# Identify a file's real type from its magic number (python-magic).
import magic
print magic.from_file("my_image.jpg")
if magic.from_file("upload.jpg", mime=True) == "image/jpeg":
    continue_uploading("upload.jpg")
else:
    alert("Sorry! This file type is not allowed")
# imghdr: stdlib image-type sniffing.
import imghdr
print imghdr.what("path/to/my/file.ext")
import binascii
def spoof_file(file, magic_number):
    """Prepend the bytes of *magic_number* (a hex string) to *file* in place."""
    header = binascii.unhexlify(magic_number)
    with open(file, "r+b") as handle:
        original = handle.read()
        handle.seek(0)
        handle.write(header + original)
def to_ascii_bytes(string):
    """Return each character of *string* as 8 binary digits, space-separated."""
    bit_groups = [format(ord(ch), '08b') for ch in string]
    return " ".join(bit_groups)
# Manual str <-> hex conversions (Py2 only: str.decode("hex") is gone in Py3).
string = "my ascii string"
"".join(hex(ord(char))[2:] for char in string)
hex_string = "6d7920617363696920737472696e67"
hex_string.decode("hex")
"".join(chr(int(hex_string[i:i+2], 16)) for i in range(0, len(hex_string), 2))
def hexdump(string, length=8):
    """Render *string* as a classic hex dump: offset, hex digits, printable text.

    Python 2 only: relies on the `unicode` builtin and `xrange`.
    *length* is the number of characters shown per row.
    """
    result = []
    # Unicode code points may need 4 hex digits; plain byte strings need 2.
    digits = 4 if isinstance(string, unicode) else 2
    for i in xrange(0, len(string), length):
        s = string[i:i + length]
        hexa = "".join("{:0{}X}".format(ord(x), digits) for x in s)
        # Substitute '.' for anything outside the printable range 0x20..0x7e.
        text = "".join(x if 0x20 <= ord(x) < 0x7F else '.' for x in s)
        result.append("{:04X} {:{}} {}".format(i, hexa, length * (digits + 1), text))
    return '\n'.join(result)
# Dump a whole file through hexdump (Py2 print statement, text-mode read).
with open("/path/to/my_file.ext", "r") as f:
    print hexdump(f.read())
import struct
# Pack/unpack 4-byte integers with struct.
num = 0x103e4
struct.pack("I", 0x103e4)
string = '\xe4\x03\x01\x00'
struct.unpack("i", string)
# (66532,)
# Byte-swap a 16-bit value: unpack as big-endian, repack as little-endian.
bytes = '\x01\xc2'
struct.pack("<h", struct.unpack(">h", bytes)[0])
# '\xc2\x01'
import base64
base64.b64encode('encodings are fun...')
# 'ZW5jb2RpbmdzIGFyZSBmdW4uLi4='
base64.b64decode(_)
# 'encodings are fun...'
# 8 bits per char, regrouped into 6-bit chunks (how base64 slices input).
string = "hello\x00"
binary_string = ' '.join('{:08b}'.format(ord(char)) for char in string)
" ".join(binary_string[i:i+6] for i in range(0, len(binary_string), 6))
# '011010 000110 010101 101100 011011 000110 111100 000000'
bin_string = '011010 000110 010101 101100 011011 000110 111100 000000'
[int(b, 2) for b in bin_string.split()]
# [26, 6, 21, 44, 27, 6, 60, 0]
# Unicode round-trips under the Py2 unicode/str split.
u'◑ \u2020'.encode('utf8')
'\xe2\x97\x91 \xe2\x80\xa0'.decode('utf8')
unicode('\xe2\x97\x91 \xe2\x80\xa0', encoding='utf8')
# u'\u25d1 \u2020'
utf8_string = 'Åêíòü'
utf8_string
unicode_string = utf8_string.decode('utf8')
unicode_string
unicode_string.encode('mac roman')
# Encoding non-ASCII text to 'ascii' raises UnicodeEncodeError:
'Åêíòü'.decode('utf8').encode('ascii')
# Traceback (most recent call last):
# File "<stdin>", line 1, in <module>
# UnicodeEncodeError: 'ascii' codec can't encode characters in position 0-4: ordinal not in range(128)
# Mojibake demo: decoding with the wrong codec yields unrelated glyphs.
file = """潍楪慢敫椠桴慧扲敬整瑸琠慨⁴獩琠敨爠獥汵⁴景琠硥⁴敢湩敤潣敤獵湩湡甠楮瑮湥敤档
牡捡整湥潣楤杮楷桴挠浯汰瑥汥⁹湵敲慬整湯獥景整牦浯愠搠晩敦敲瑮眠楲楴杮猠獹整‧⠊慔敫
牦浯攠楷楫数楤牯⥧"""
print file.decode('utf8').encode('utf16')
# ??Mojibake is the garbled text that is the result of text being decoded using an
# unintended character encoding with completely unrelated ones, often from a
# different writing system.' (Taken from en.wikipedia.org)
import ftfy
ftfy.fix_text(u"“Mojibake“ can be fixed.")
# Bitwise operator demos.
bin(0b1010 & 0b1111110111)
bin(0b1010 | 0b0110)
bin(0b10111 | 0b01000)
bin(0b100 ^ 0b110)
# '0b10'
bin(-0b1010 >> 0b10)
# '-0b11'
# Two equivalent ways to concatenate the bits of x and y.
x = 0b1111
y = 0b1010
bin(int("{:b}{:b}".format(x, y), 2))
# '0b11111010'
bin(x << 4 | y)
# '0b11111010'
import random
import string
# SystemRandom draws from os.urandom — suitable for security-sensitive use.
r = random.SystemRandom()
r.randint(0, 20)
r.random()
r.getrandbits(40)
chars = string.printable
r.choice(chars)
# Randomize the order of a sequence
seq = ['a', 'b', 'c', 'd', 'e']
r.shuffle(seq)
print seq
# rot13 str codec (Py2 only; toy obfuscation, not encryption).
"ALLIGATOR".encode('rot13')
"NYYVTNGBE".encode('rot13')
# Caesar-style shift by 20 (mod 256) and its inverse.
plaintext = "A secret-ish message!"
"".join(chr((ord(c) + 20) % 256) for c in plaintext)
ciphertext = 'U4\x87yw\x86y\x88A}\x87|4\x81y\x87\x87u{y5'
"".join(chr((ord(c) - 20) % 256) for c in ciphertext)
# 'A secret-ish message!'
# XOR one-time pad on small integers; XOR with the same pad decrypts.
plaintext = 0b110100001101001
one_time_pad = 0b110000011100001
bin(plaintext ^ one_time_pad)
# '0b100010001000'
decrypted = 0b100010001000 ^ one_time_pad
format(decrypted, 'x').decode('hex')
# 'hi'
import os
import binascii
# One-time pad over the integer form of an ASCII message.
plaintext = "this is a secret message"
plaintext_bits = int(binascii.hexlify(plaintext), 16)
print "plaintext (ascii):", plaintext
print "plaintext (hex):", plaintext_bits
onetime_pad = int(binascii.hexlify(os.urandom(len(plaintext))), 16)
print "one-time pad: (hex):", onetime_pad
ciphertext_bits = plaintext_bits ^ onetime_pad
print "encrypted text (hex):", ciphertext_bits
# XOR with the same pad inverts the encryption.
decrypted_text = ciphertext_bits ^ onetime_pad
decrypted_text = binascii.unhexlify(hex(decrypted_text)[2:-1])
print "decrypted text (ascii):", decrypted_text
import random
import binascii
# Why pad reuse is fatal: with a shared pad, c1 ^ c2 == p1 ^ p2.
p1 = "this is the part where you run away"
p2 = "from bad cryptography practices."
p1 = p1.ljust(len(p2))
p2 = p2.ljust(len(p1))
p1 = int(binascii.hexlify(p1), 16)
p2 = int(binascii.hexlify(p2), 16)
otp = random.SystemRandom().getrandbits(p1.bit_length())
c1 = p1 ^ otp
c2 = p2 ^ otp # otp reuse...not good!
print "c1 ^ c2 == p1 ^ p2 ?", c1 ^ c2 == p1 ^ p2
print "c1 ^ c2 =", hex(c1 ^ c2)
# Crib-dragging: slide a guessed word across c1 ^ c2 to expose plaintext.
crib = " the "
crib = int(binascii.hexlify(crib), 16)
xored = c1 ^ c2
print "crib =", hex(crib)
cbl = crib.bit_length()
xbl = xored.bit_length()
print
mask = (2**(cbl + 1) - 1)
fill = len(str(xbl / 8))
for s in range(0, xbl - cbl + 8, 8):
    xor = (xored ^ (crib << s)) & (mask << s)
    out = binascii.unhexlify(hex(xor)[2:-1])
    print "{:>{}} {}".format(s/8, fill, out)
from cryptography.fernet import Fernet
# Fernet: authenticated symmetric encryption with a freshly generated key.
key = Fernet.generate_key()
f = Fernet(key)
ciphertext = f.encrypt("this is my plaintext")
decrypted = f.decrypt(ciphertext)
print decrypted
import os
from cryptography.hazmat.primitives import padding
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
from cryptography.hazmat.backends import default_backend
# Manual AES-256-CBC with PKCS7 padding, random key and IV.
pt = "my plaintext"
backend = default_backend()
key = os.urandom(32)
iv = os.urandom(16)
padder = padding.PKCS7(128).padder()
pt = padder.update(pt) + padder.finalize()
cipher = Cipher(algorithms.AES(key), modes.CBC(iv), backend=backend)
encryptor = cipher.encryptor()
ct = encryptor.update(pt) + encryptor.finalize()
decryptor = cipher.decryptor()
out = decryptor.update(ct) + decryptor.finalize()
unpadder = padding.PKCS7(128).unpadder()
out = unpadder.update(out) + unpadder.finalize()
print out
import hashlib
# MD5/SHA1 digests (both cryptographically broken; demo only).
hashlib.md5("hash me please").hexdigest()
hashlib.sha1("hash me please").hexdigest()
# '1a58c9b3d138a45519518ee42e634600d1b52153'
import os
from cryptography.hazmat.primitives.kdf.scrypt import Scrypt
from cryptography.hazmat.backends import default_backend
# scrypt key derivation from a password with a random 16-byte salt.
backend = default_backend()
salt = os.urandom(16)
kdf = Scrypt(salt=salt, length=64, n=2**14, r=8, p=1, backend=backend)
key = kdf.derive("your favorite password")
key
import hmac
import hashlib
# HMAC-SHA256 over a ciphertext; compare_digest avoids timing side channels.
secret_key = "my secret key"
ciphertext = "my ciphertext"
h = hmac.new(key=secret_key, msg=ciphertext, digestmod=hashlib.sha256)
print h.hexdigest()
hmac.compare_digest(h.hexdigest(), h.hexdigest())
# Textbook RSA with small primes — insecure, brute-forced below.
p = 9576890767
q = 1299827
n = p * q
print n
e = 65537
phi = (p - 1) * (q - 1)
phi % e != 0
import sympy
# Modular inverse of e mod phi via the extended GCD.
d = sympy.numbers.igcdex(e, phi)[0]
print d
m = 12345
c = pow(m, e, n)
print c
# 3599057382134015
pow(c, d, n)
# Brute-force recovery of m (feasible only because the primes are tiny).
m = 0
while pow(m, e, n) != c:
    m += 1
print m
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.primitives import serialization
private_key = rsa.generate_private_key(public_exponent=65537, key_size=2048, b
ackend=default_backend())
public_key = private_key.public_key()
private_pem = private_key.private_bytes(encoding=serialization.Encoding.PEM,
format=serialization.PrivateFormat.PKCS8,
encryption_algorithm=serialization.BestAvailableEncryption('your password here'))
public_pem = public_key.public_bytes(encoding=serialization.Encoding.PEM,
format=serialization.PublicFormat.SubjectPublicKeyInfo)
print public_pem
print private_pem
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import padding
import base64
with open("path/to/public_key.pem", "rb") as key_file:
public_key = serialization.load_pem_public_key(key_file.read(),
backend=default_backend())
message = "your secret message"
ciphertext = public_key.encrypt(message,
padding.OAEP(mgf=padding.MGF1(algorithm=hashes.SHA256()),
algorithm=hashes.SHA256(),
label=None))
b64_ciphertext = base64.urlsafe_b64encode(ciphertext)
print b64_ciphertext
plaintext = private_key.decrypt(ciphertext,
padding.OAEP(mgf=padding.MGF1(algorithm=hashes.SHA256()),
algorithm=hashes.SHA256(),
label=None))
print plaintext
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import padding
signer = private_key.signer(padding.PSS(mgf=padding.MGF1(hashes.SHA256()), salt_length=padding.PSS.MAX_LENGTH), hashes.SHA256())
message = "A message of arbitrary length"
signer.update(message)
signature = signer.finalize()
public_key = private_key.public_key()
verifier = public_key.verifier(signature, padding.PSS(mgf=padding.MGF1(hashes.SHA256()), salt_length=padding.PSS.MAX_LENGTH), hashes.SHA256())
verifier.update(message)
verifier.verify()
import requests
# Basic GET and response inspection.
r = requests.get('https://www.google.com/imghp')
r.content[:200]
# View status code
r.status_code
# 200
r.headers
len(r.content)
r.apparent_encoding
r.elapsed
r.request.headers
# {'Accept': '*/*',
# 'Accept-Encoding': 'gzip, deflate',
# 'Connection': 'keep-alive',
# 'User-Agent': 'python-requests/2.12.4'}
# Spoof a browser User-Agent via custom headers.
custom_headers = {"user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36"}
r = requests.get("https://www.google.com/imghp", headers=custom_headers)
r.request.headers
# {'Accept': '*/*',
# 'Accept-Encoding': 'gzip, deflate',
# 'Connection': 'keep-alive',
# 'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36'}
import requests
import logging
import http.client as http_client
# Enable wire-level HTTP debug logging for requests/urllib3.
http_client.HTTPConnection.debuglevel = 1
logging.basicConfig()
logging.getLogger().setLevel(logging.DEBUG)
requests_log = logging.getLogger("requests.packages.urllib3")
requests_log.setLevel(logging.DEBUG)
requests_log.propagate = True
r = requests.get('https://www.google.com/')
import urlparse
# URL parsing (Py2 urlparse; urllib.parse in Py3).
simple_url = "http://www.example.com/path/to/my/page"
parsed = urlparse.urlparse(simple_url)
parsed.scheme
parsed.hostname
parsed.path
url_with_query = "http://www.example.com/?page=1&key=Anvn4mo24"
query = urlparse.urlparse(url_with_query).query
urlparse.parse_qs(query)
import urllib
# Percent-encoding helpers (Py2 urllib; urllib.parse in Py3).
url = 'https://www.example.com/%5EA-url-with-%-and-%5E?page=page+with%20spaces'
urllib.unquote(url)
chars = '!@#$%^%$#)'
urllib.quote(chars)
urllib.unquote_plus(url)
urllib.quote_plus('one two')
'one+two'
import requests
from bs4 import BeautifulSoup
# Scrape links out of static HTML with BeautifulSoup + lxml.
r = requests.get("http://www.google.com")
soup = BeautifulSoup(r.content, "lxml")
soup.find_all('p')
soup.find_all('a')
for link in soup.find_all('a'):
    print link.text, link["href"]
import dryscrape
from bs4 import BeautifulSoup
# dryscrape renders JavaScript before handing the HTML to BeautifulSoup.
session = dryscrape.Session()
session.visit("http://www.google.com")
r = session.body()
soup = BeautifulSoup(r, "lxml")
from selenium import webdriver
# Drive a real Chrome browser via Selenium.
driver = webdriver.Chrome("/path/to/chromedriver")
driver.get("http://www.google.com")
html = driver.page_source
driver.save_screenshot("screenshot.png")
driver.quit()
import smtplib
# Send a test mail through a local debug SMTP server on port 1025.
server = smtplib.SMTP('localhost', port=1025)
server.set_debuglevel(True)
server.sendmail("me@localhost", "you@localhost", "This is an email message")
server.quit()
|
from django.contrib.auth import get_user_model
from rest_framework.authentication import SessionAuthentication, BasicAuthentication
from rest_framework import routers, serializers, viewsets, permissions
from rest_framework_jwt.authentication import JSONWebTokenAuthentication
from rest_framework.reverse import reverse
from .models import Comment
# Resolve the active user model once (supports a custom AUTH_USER_MODEL).
User = get_user_model()
class CommentVideoUrlHyperlinkedIdentityField(serializers.HyperlinkedIdentityField):
    """Identity field that links a comment to its video's detail endpoint."""

    def get_url(self, obj, view_name, request, format):
        # The video detail route is keyed by category slug + video slug.
        url_kwargs = {
            "cat_slug": obj.video.category.slug,
            "vid_slug": obj.video.slug,
        }
        return reverse(view_name, kwargs=url_kwargs, request=request, format=format)
class CommentUpdateSerializer(serializers.ModelSerializer):
    """Serializer used when editing an existing comment (text only)."""
    # Expose the author's username read-only instead of the FK id.
    user = serializers.CharField(source='user.username', read_only=True)

    class Meta:
        model = Comment
        fields = ['id', 'user', 'text']
class CommentCreateSerializer(serializers.ModelSerializer):
    """Serializer for creating a comment (optionally as a reply via parent)."""

    class Meta:
        model = Comment
        fields = ['text', 'user', 'video', 'parent']
class ChildCommentSerializer(serializers.HyperlinkedModelSerializer):
    """Flat serializer for reply comments nested under a parent comment."""
    user = serializers.CharField(source='user.username', read_only=True)

    class Meta:
        model = Comment
        fields = ['id', 'user', 'text']
class CommentSerializer(serializers.HyperlinkedModelSerializer):
    """Comment with its own URL, a link to its video, and nested child replies."""
    url = serializers.HyperlinkedIdentityField("comment_detail_api", lookup_field="pk")
    video = CommentVideoUrlHyperlinkedIdentityField("video_detail_api")
    user = serializers.CharField(source='user.username', read_only=True)
    children = serializers.SerializerMethodField(read_only=True)

    def get_children(self, instance):
        """Serialize direct replies (comments whose parent is this comment)."""
        queryset = Comment.objects.filter(parent__pk=instance.pk)
        # BUG FIX: previously the Comment instance itself was passed as the
        # "request" in context; hyperlinked fields need the real request, so
        # propagate this serializer's own context instead.
        serializer = ChildCommentSerializer(queryset, context=self.context, many=True)
        return serializer.data

    class Meta:
        model = Comment
        fields = [
            'url',
            'id',
            'children',
            'user',
            'video',
            'text',
        ]
class CommentViewSet(viewsets.ModelViewSet):
    """CRUD endpoints for comments; callers must be authenticated."""
    authentication_classes = [
        SessionAuthentication,
        BasicAuthentication,
        JSONWebTokenAuthentication,
    ]
    permission_classes = [permissions.IsAuthenticated]
    queryset = Comment.objects.all()
    serializer_class = CommentSerializer
|
# NOTE(review): removed two leftover, unresolved git merge-conflict markers
# ("<<<<<<< HEAD") that would be a syntax error if this module were imported.
""" Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP852.TXT' with gencodec.py.
"""#"
import codecs
class Codec(codecs.Codec):
    """Stateless cp852 codec built on the charmap machinery.

    encoding_map and decoding_table are defined at module level below.
    """
    def encode(self,input,errors='strict'):
        return codecs.charmap_encode(input,errors,encoding_map)
    def decode(self,input,errors='strict'):
        return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
    def encode(self, input, final=False):
        # [0] drops the consumed-length half of the (output, length) tuple.
        return codecs.charmap_encode(input,self.errors,encoding_map)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
    def decode(self, input, final=False):
        # [0] drops the consumed-length half of the (output, length) tuple.
        return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
    # Stream writer: inherits encode() from Codec, buffering from StreamWriter.
    pass
class StreamReader(Codec,codecs.StreamReader):
    # Stream reader: inherits decode() from Codec, buffering from StreamReader.
    pass
def getregentry():
    """Return the CodecInfo record that registers this codec as 'cp852'."""
    return codecs.CodecInfo(
        name='cp852',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
# Map each cp852 byte (0x00-0xff) to its Unicode code point. Bytes not listed
# in the update below keep the identity mapping from make_identity_dict.
decoding_map = codecs.make_identity_dict(range(256))
decoding_map.update({
    0x0080: 0x00c7, # LATIN CAPITAL LETTER C WITH CEDILLA
    0x0081: 0x00fc, # LATIN SMALL LETTER U WITH DIAERESIS
    0x0082: 0x00e9, # LATIN SMALL LETTER E WITH ACUTE
    0x0083: 0x00e2, # LATIN SMALL LETTER A WITH CIRCUMFLEX
    0x0084: 0x00e4, # LATIN SMALL LETTER A WITH DIAERESIS
    0x0085: 0x016f, # LATIN SMALL LETTER U WITH RING ABOVE
    0x0086: 0x0107, # LATIN SMALL LETTER C WITH ACUTE
    0x0087: 0x00e7, # LATIN SMALL LETTER C WITH CEDILLA
    0x0088: 0x0142, # LATIN SMALL LETTER L WITH STROKE
    0x0089: 0x00eb, # LATIN SMALL LETTER E WITH DIAERESIS
    0x008a: 0x0150, # LATIN CAPITAL LETTER O WITH DOUBLE ACUTE
    0x008b: 0x0151, # LATIN SMALL LETTER O WITH DOUBLE ACUTE
    0x008c: 0x00ee, # LATIN SMALL LETTER I WITH CIRCUMFLEX
    0x008d: 0x0179, # LATIN CAPITAL LETTER Z WITH ACUTE
    0x008e: 0x00c4, # LATIN CAPITAL LETTER A WITH DIAERESIS
    0x008f: 0x0106, # LATIN CAPITAL LETTER C WITH ACUTE
    0x0090: 0x00c9, # LATIN CAPITAL LETTER E WITH ACUTE
    0x0091: 0x0139, # LATIN CAPITAL LETTER L WITH ACUTE
    0x0092: 0x013a, # LATIN SMALL LETTER L WITH ACUTE
    0x0093: 0x00f4, # LATIN SMALL LETTER O WITH CIRCUMFLEX
    0x0094: 0x00f6, # LATIN SMALL LETTER O WITH DIAERESIS
    0x0095: 0x013d, # LATIN CAPITAL LETTER L WITH CARON
    0x0096: 0x013e, # LATIN SMALL LETTER L WITH CARON
    0x0097: 0x015a, # LATIN CAPITAL LETTER S WITH ACUTE
    0x0098: 0x015b, # LATIN SMALL LETTER S WITH ACUTE
    0x0099: 0x00d6, # LATIN CAPITAL LETTER O WITH DIAERESIS
    0x009a: 0x00dc, # LATIN CAPITAL LETTER U WITH DIAERESIS
    0x009b: 0x0164, # LATIN CAPITAL LETTER T WITH CARON
    0x009c: 0x0165, # LATIN SMALL LETTER T WITH CARON
    0x009d: 0x0141, # LATIN CAPITAL LETTER L WITH STROKE
    0x009e: 0x00d7, # MULTIPLICATION SIGN
    0x009f: 0x010d, # LATIN SMALL LETTER C WITH CARON
    0x00a0: 0x00e1, # LATIN SMALL LETTER A WITH ACUTE
    0x00a1: 0x00ed, # LATIN SMALL LETTER I WITH ACUTE
    0x00a2: 0x00f3, # LATIN SMALL LETTER O WITH ACUTE
    0x00a3: 0x00fa, # LATIN SMALL LETTER U WITH ACUTE
    0x00a4: 0x0104, # LATIN CAPITAL LETTER A WITH OGONEK
    0x00a5: 0x0105, # LATIN SMALL LETTER A WITH OGONEK
    0x00a6: 0x017d, # LATIN CAPITAL LETTER Z WITH CARON
    0x00a7: 0x017e, # LATIN SMALL LETTER Z WITH CARON
    0x00a8: 0x0118, # LATIN CAPITAL LETTER E WITH OGONEK
    0x00a9: 0x0119, # LATIN SMALL LETTER E WITH OGONEK
    0x00aa: 0x00ac, # NOT SIGN
    0x00ab: 0x017a, # LATIN SMALL LETTER Z WITH ACUTE
    0x00ac: 0x010c, # LATIN CAPITAL LETTER C WITH CARON
    0x00ad: 0x015f, # LATIN SMALL LETTER S WITH CEDILLA
    0x00ae: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
    0x00af: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
    0x00b0: 0x2591, # LIGHT SHADE
    0x00b1: 0x2592, # MEDIUM SHADE
    0x00b2: 0x2593, # DARK SHADE
    0x00b3: 0x2502, # BOX DRAWINGS LIGHT VERTICAL
    0x00b4: 0x2524, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
    0x00b5: 0x00c1, # LATIN CAPITAL LETTER A WITH ACUTE
    0x00b6: 0x00c2, # LATIN CAPITAL LETTER A WITH CIRCUMFLEX
    0x00b7: 0x011a, # LATIN CAPITAL LETTER E WITH CARON
    0x00b8: 0x015e, # LATIN CAPITAL LETTER S WITH CEDILLA
    0x00b9: 0x2563, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
    0x00ba: 0x2551, # BOX DRAWINGS DOUBLE VERTICAL
    0x00bb: 0x2557, # BOX DRAWINGS DOUBLE DOWN AND LEFT
    0x00bc: 0x255d, # BOX DRAWINGS DOUBLE UP AND LEFT
    0x00bd: 0x017b, # LATIN CAPITAL LETTER Z WITH DOT ABOVE
    0x00be: 0x017c, # LATIN SMALL LETTER Z WITH DOT ABOVE
    0x00bf: 0x2510, # BOX DRAWINGS LIGHT DOWN AND LEFT
    0x00c0: 0x2514, # BOX DRAWINGS LIGHT UP AND RIGHT
    0x00c1: 0x2534, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
    0x00c2: 0x252c, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
    0x00c3: 0x251c, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
    0x00c4: 0x2500, # BOX DRAWINGS LIGHT HORIZONTAL
    0x00c5: 0x253c, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
    0x00c6: 0x0102, # LATIN CAPITAL LETTER A WITH BREVE
    0x00c7: 0x0103, # LATIN SMALL LETTER A WITH BREVE
    0x00c8: 0x255a, # BOX DRAWINGS DOUBLE UP AND RIGHT
    0x00c9: 0x2554, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
    0x00ca: 0x2569, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
    0x00cb: 0x2566, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
    0x00cc: 0x2560, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
    0x00cd: 0x2550, # BOX DRAWINGS DOUBLE HORIZONTAL
    0x00ce: 0x256c, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
    0x00cf: 0x00a4, # CURRENCY SIGN
    0x00d0: 0x0111, # LATIN SMALL LETTER D WITH STROKE
    0x00d1: 0x0110, # LATIN CAPITAL LETTER D WITH STROKE
    0x00d2: 0x010e, # LATIN CAPITAL LETTER D WITH CARON
    0x00d3: 0x00cb, # LATIN CAPITAL LETTER E WITH DIAERESIS
    0x00d4: 0x010f, # LATIN SMALL LETTER D WITH CARON
    0x00d5: 0x0147, # LATIN CAPITAL LETTER N WITH CARON
    0x00d6: 0x00cd, # LATIN CAPITAL LETTER I WITH ACUTE
    0x00d7: 0x00ce, # LATIN CAPITAL LETTER I WITH CIRCUMFLEX
    0x00d8: 0x011b, # LATIN SMALL LETTER E WITH CARON
    0x00d9: 0x2518, # BOX DRAWINGS LIGHT UP AND LEFT
    0x00da: 0x250c, # BOX DRAWINGS LIGHT DOWN AND RIGHT
    0x00db: 0x2588, # FULL BLOCK
    0x00dc: 0x2584, # LOWER HALF BLOCK
    0x00dd: 0x0162, # LATIN CAPITAL LETTER T WITH CEDILLA
    0x00de: 0x016e, # LATIN CAPITAL LETTER U WITH RING ABOVE
    0x00df: 0x2580, # UPPER HALF BLOCK
    0x00e0: 0x00d3, # LATIN CAPITAL LETTER O WITH ACUTE
    0x00e1: 0x00df, # LATIN SMALL LETTER SHARP S
    0x00e2: 0x00d4, # LATIN CAPITAL LETTER O WITH CIRCUMFLEX
    0x00e3: 0x0143, # LATIN CAPITAL LETTER N WITH ACUTE
    0x00e4: 0x0144, # LATIN SMALL LETTER N WITH ACUTE
    0x00e5: 0x0148, # LATIN SMALL LETTER N WITH CARON
    0x00e6: 0x0160, # LATIN CAPITAL LETTER S WITH CARON
    0x00e7: 0x0161, # LATIN SMALL LETTER S WITH CARON
    0x00e8: 0x0154, # LATIN CAPITAL LETTER R WITH ACUTE
    0x00e9: 0x00da, # LATIN CAPITAL LETTER U WITH ACUTE
    0x00ea: 0x0155, # LATIN SMALL LETTER R WITH ACUTE
    0x00eb: 0x0170, # LATIN CAPITAL LETTER U WITH DOUBLE ACUTE
    0x00ec: 0x00fd, # LATIN SMALL LETTER Y WITH ACUTE
    0x00ed: 0x00dd, # LATIN CAPITAL LETTER Y WITH ACUTE
    0x00ee: 0x0163, # LATIN SMALL LETTER T WITH CEDILLA
    0x00ef: 0x00b4, # ACUTE ACCENT
    0x00f0: 0x00ad, # SOFT HYPHEN
    0x00f1: 0x02dd, # DOUBLE ACUTE ACCENT
    0x00f2: 0x02db, # OGONEK
    0x00f3: 0x02c7, # CARON
    0x00f4: 0x02d8, # BREVE
    0x00f5: 0x00a7, # SECTION SIGN
    0x00f6: 0x00f7, # DIVISION SIGN
    0x00f7: 0x00b8, # CEDILLA
    0x00f8: 0x00b0, # DEGREE SIGN
    0x00f9: 0x00a8, # DIAERESIS
    0x00fa: 0x02d9, # DOT ABOVE
    0x00fb: 0x0171, # LATIN SMALL LETTER U WITH DOUBLE ACUTE
    0x00fc: 0x0158, # LATIN CAPITAL LETTER R WITH CARON
    0x00fd: 0x0159, # LATIN SMALL LETTER R WITH CARON
    0x00fe: 0x25a0, # BLACK SQUARE
    0x00ff: 0x00a0, # NO-BREAK SPACE
})
decoding_table = (
'\x00' # 0x0000 -> NULL
'\x01' # 0x0001 -> START OF HEADING
'\x02' # 0x0002 -> START OF TEXT
'\x03' # 0x0003 -> END OF TEXT
'\x04' # 0x0004 -> END OF TRANSMISSION
'\x05' # 0x0005 -> ENQUIRY
'\x06' # 0x0006 -> ACKNOWLEDGE
'\x07' # 0x0007 -> BELL
'\x08' # 0x0008 -> BACKSPACE
'\t' # 0x0009 -> HORIZONTAL TABULATION
'\n' # 0x000a -> LINE FEED
'\x0b' # 0x000b -> VERTICAL TABULATION
'\x0c' # 0x000c -> FORM FEED
'\r' # 0x000d -> CARRIAGE RETURN
'\x0e' # 0x000e -> SHIFT OUT
'\x0f' # 0x000f -> SHIFT IN
'\x10' # 0x0010 -> DATA LINK ESCAPE
'\x11' # 0x0011 -> DEVICE CONTROL ONE
'\x12' # 0x0012 -> DEVICE CONTROL TWO
'\x13' # 0x0013 -> DEVICE CONTROL THREE
'\x14' # 0x0014 -> DEVICE CONTROL FOUR
'\x15' # 0x0015 -> NEGATIVE ACKNOWLEDGE
'\x16' # 0x0016 -> SYNCHRONOUS IDLE
'\x17' # 0x0017 -> END OF TRANSMISSION BLOCK
'\x18' # 0x0018 -> CANCEL
'\x19' # 0x0019 -> END OF MEDIUM
'\x1a' # 0x001a -> SUBSTITUTE
'\x1b' # 0x001b -> ESCAPE
'\x1c' # 0x001c -> FILE SEPARATOR
'\x1d' # 0x001d -> GROUP SEPARATOR
'\x1e' # 0x001e -> RECORD SEPARATOR
'\x1f' # 0x001f -> UNIT SEPARATOR
' ' # 0x0020 -> SPACE
'!' # 0x0021 -> EXCLAMATION MARK
'"' # 0x0022 -> QUOTATION MARK
'#' # 0x0023 -> NUMBER SIGN
'$' # 0x0024 -> DOLLAR SIGN
'%' # 0x0025 -> PERCENT SIGN
'&' # 0x0026 -> AMPERSAND
"'" # 0x0027 -> APOSTROPHE
'(' # 0x0028 -> LEFT PARENTHESIS
')' # 0x0029 -> RIGHT PARENTHESIS
'*' # 0x002a -> ASTERISK
'+' # 0x002b -> PLUS SIGN
',' # 0x002c -> COMMA
'-' # 0x002d -> HYPHEN-MINUS
'.' # 0x002e -> FULL STOP
'/' # 0x002f -> SOLIDUS
'0' # 0x0030 -> DIGIT ZERO
'1' # 0x0031 -> DIGIT ONE
'2' # 0x0032 -> DIGIT TWO
'3' # 0x0033 -> DIGIT THREE
'4' # 0x0034 -> DIGIT FOUR
'5' # 0x0035 -> DIGIT FIVE
'6' # 0x0036 -> DIGIT SIX
'7' # 0x0037 -> DIGIT SEVEN
'8' # 0x0038 -> DIGIT EIGHT
'9' # 0x0039 -> DIGIT NINE
':' # 0x003a -> COLON
';' # 0x003b -> SEMICOLON
'<' # 0x003c -> LESS-THAN SIGN
'=' # 0x003d -> EQUALS SIGN
'>' # 0x003e -> GREATER-THAN SIGN
'?' # 0x003f -> QUESTION MARK
'@' # 0x0040 -> COMMERCIAL AT
'A' # 0x0041 -> LATIN CAPITAL LETTER A
'B' # 0x0042 -> LATIN CAPITAL LETTER B
'C' # 0x0043 -> LATIN CAPITAL LETTER C
'D' # 0x0044 -> LATIN CAPITAL LETTER D
'E' # 0x0045 -> LATIN CAPITAL LETTER E
'F' # 0x0046 -> LATIN CAPITAL LETTER F
'G' # 0x0047 -> LATIN CAPITAL LETTER G
'H' # 0x0048 -> LATIN CAPITAL LETTER H
'I' # 0x0049 -> LATIN CAPITAL LETTER I
'J' # 0x004a -> LATIN CAPITAL LETTER J
'K' # 0x004b -> LATIN CAPITAL LETTER K
'L' # 0x004c -> LATIN CAPITAL LETTER L
'M' # 0x004d -> LATIN CAPITAL LETTER M
'N' # 0x004e -> LATIN CAPITAL LETTER N
'O' # 0x004f -> LATIN CAPITAL LETTER O
'P' # 0x0050 -> LATIN CAPITAL LETTER P
'Q' # 0x0051 -> LATIN CAPITAL LETTER Q
'R' # 0x0052 -> LATIN CAPITAL LETTER R
'S' # 0x0053 -> LATIN CAPITAL LETTER S
'T' # 0x0054 -> LATIN CAPITAL LETTER T
'U' # 0x0055 -> LATIN CAPITAL LETTER U
'V' # 0x0056 -> LATIN CAPITAL LETTER V
'W' # 0x0057 -> LATIN CAPITAL LETTER W
'X' # 0x0058 -> LATIN CAPITAL LETTER X
'Y' # 0x0059 -> LATIN CAPITAL LETTER Y
'Z' # 0x005a -> LATIN CAPITAL LETTER Z
'[' # 0x005b -> LEFT SQUARE BRACKET
'\\' # 0x005c -> REVERSE SOLIDUS
']' # 0x005d -> RIGHT SQUARE BRACKET
'^' # 0x005e -> CIRCUMFLEX ACCENT
'_' # 0x005f -> LOW LINE
'`' # 0x0060 -> GRAVE ACCENT
'a' # 0x0061 -> LATIN SMALL LETTER A
'b' # 0x0062 -> LATIN SMALL LETTER B
'c' # 0x0063 -> LATIN SMALL LETTER C
'd' # 0x0064 -> LATIN SMALL LETTER D
'e' # 0x0065 -> LATIN SMALL LETTER E
'f' # 0x0066 -> LATIN SMALL LETTER F
'g' # 0x0067 -> LATIN SMALL LETTER G
'h' # 0x0068 -> LATIN SMALL LETTER H
'i' # 0x0069 -> LATIN SMALL LETTER I
'j' # 0x006a -> LATIN SMALL LETTER J
'k' # 0x006b -> LATIN SMALL LETTER K
'l' # 0x006c -> LATIN SMALL LETTER L
'm' # 0x006d -> LATIN SMALL LETTER M
'n' # 0x006e -> LATIN SMALL LETTER N
'o' # 0x006f -> LATIN SMALL LETTER O
'p' # 0x0070 -> LATIN SMALL LETTER P
'q' # 0x0071 -> LATIN SMALL LETTER Q
'r' # 0x0072 -> LATIN SMALL LETTER R
's' # 0x0073 -> LATIN SMALL LETTER S
't' # 0x0074 -> LATIN SMALL LETTER T
'u' # 0x0075 -> LATIN SMALL LETTER U
'v' # 0x0076 -> LATIN SMALL LETTER V
'w' # 0x0077 -> LATIN SMALL LETTER W
'x' # 0x0078 -> LATIN SMALL LETTER X
'y' # 0x0079 -> LATIN SMALL LETTER Y
'z' # 0x007a -> LATIN SMALL LETTER Z
'{' # 0x007b -> LEFT CURLY BRACKET
'|' # 0x007c -> VERTICAL LINE
'}' # 0x007d -> RIGHT CURLY BRACKET
'~' # 0x007e -> TILDE
'\x7f' # 0x007f -> DELETE
'\xc7' # 0x0080 -> LATIN CAPITAL LETTER C WITH CEDILLA
'\xfc' # 0x0081 -> LATIN SMALL LETTER U WITH DIAERESIS
'\xe9' # 0x0082 -> LATIN SMALL LETTER E WITH ACUTE
'\xe2' # 0x0083 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
'\xe4' # 0x0084 -> LATIN SMALL LETTER A WITH DIAERESIS
'\u016f' # 0x0085 -> LATIN SMALL LETTER U WITH RING ABOVE
'\u0107' # 0x0086 -> LATIN SMALL LETTER C WITH ACUTE
'\xe7' # 0x0087 -> LATIN SMALL LETTER C WITH CEDILLA
'\u0142' # 0x0088 -> LATIN SMALL LETTER L WITH STROKE
'\xeb' # 0x0089 -> LATIN SMALL LETTER E WITH DIAERESIS
'\u0150' # 0x008a -> LATIN CAPITAL LETTER O WITH DOUBLE ACUTE
'\u0151' # 0x008b -> LATIN SMALL LETTER O WITH DOUBLE ACUTE
'\xee' # 0x008c -> LATIN SMALL LETTER I WITH CIRCUMFLEX
'\u0179' # 0x008d -> LATIN CAPITAL LETTER Z WITH ACUTE
'\xc4' # 0x008e -> LATIN CAPITAL LETTER A WITH DIAERESIS
'\u0106' # 0x008f -> LATIN CAPITAL LETTER C WITH ACUTE
'\xc9' # 0x0090 -> LATIN CAPITAL LETTER E WITH ACUTE
'\u0139' # 0x0091 -> LATIN CAPITAL LETTER L WITH ACUTE
'\u013a' # 0x0092 -> LATIN SMALL LETTER L WITH ACUTE
'\xf4' # 0x0093 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
'\xf6' # 0x0094 -> LATIN SMALL LETTER O WITH DIAERESIS
'\u013d' # 0x0095 -> LATIN CAPITAL LETTER L WITH CARON
'\u013e' # 0x0096 -> LATIN SMALL LETTER L WITH CARON
'\u015a' # 0x0097 -> LATIN CAPITAL LETTER S WITH ACUTE
'\u015b' # 0x0098 -> LATIN SMALL LETTER S WITH ACUTE
'\xd6' # 0x0099 -> LATIN CAPITAL LETTER O WITH DIAERESIS
'\xdc' # 0x009a -> LATIN CAPITAL LETTER U WITH DIAERESIS
'\u0164' # 0x009b -> LATIN CAPITAL LETTER T WITH CARON
'\u0165' # 0x009c -> LATIN SMALL LETTER T WITH CARON
'\u0141' # 0x009d -> LATIN CAPITAL LETTER L WITH STROKE
'\xd7' # 0x009e -> MULTIPLICATION SIGN
'\u010d' # 0x009f -> LATIN SMALL LETTER C WITH CARON
'\xe1' # 0x00a0 -> LATIN SMALL LETTER A WITH ACUTE
'\xed' # 0x00a1 -> LATIN SMALL LETTER I WITH ACUTE
'\xf3' # 0x00a2 -> LATIN SMALL LETTER O WITH ACUTE
'\xfa' # 0x00a3 -> LATIN SMALL LETTER U WITH ACUTE
'\u0104' # 0x00a4 -> LATIN CAPITAL LETTER A WITH OGONEK
'\u0105' # 0x00a5 -> LATIN SMALL LETTER A WITH OGONEK
'\u017d' # 0x00a6 -> LATIN CAPITAL LETTER Z WITH CARON
'\u017e' # 0x00a7 -> LATIN SMALL LETTER Z WITH CARON
'\u0118' # 0x00a8 -> LATIN CAPITAL LETTER E WITH OGONEK
'\u0119' # 0x00a9 -> LATIN SMALL LETTER E WITH OGONEK
'\xac' # 0x00aa -> NOT SIGN
'\u017a' # 0x00ab -> LATIN SMALL LETTER Z WITH ACUTE
'\u010c' # 0x00ac -> LATIN CAPITAL LETTER C WITH CARON
'\u015f' # 0x00ad -> LATIN SMALL LETTER S WITH CEDILLA
'\xab' # 0x00ae -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
'\xbb' # 0x00af -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
'\u2591' # 0x00b0 -> LIGHT SHADE
'\u2592' # 0x00b1 -> MEDIUM SHADE
'\u2593' # 0x00b2 -> DARK SHADE
'\u2502' # 0x00b3 -> BOX DRAWINGS LIGHT VERTICAL
'\u2524' # 0x00b4 -> BOX DRAWINGS LIGHT VERTICAL AND LEFT
'\xc1' # 0x00b5 -> LATIN CAPITAL LETTER A WITH ACUTE
'\xc2' # 0x00b6 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
'\u011a' # 0x00b7 -> LATIN CAPITAL LETTER E WITH CARON
'\u015e' # 0x00b8 -> LATIN CAPITAL LETTER S WITH CEDILLA
'\u2563' # 0x00b9 -> BOX DRAWINGS DOUBLE VERTICAL AND LEFT
'\u2551' # 0x00ba -> BOX DRAWINGS DOUBLE VERTICAL
'\u2557' # 0x00bb -> BOX DRAWINGS DOUBLE DOWN AND LEFT
'\u255d' # 0x00bc -> BOX DRAWINGS DOUBLE UP AND LEFT
'\u017b' # 0x00bd -> LATIN CAPITAL LETTER Z WITH DOT ABOVE
'\u017c' # 0x00be -> LATIN SMALL LETTER Z WITH DOT ABOVE
'\u2510' # 0x00bf -> BOX DRAWINGS LIGHT DOWN AND LEFT
'\u2514' # 0x00c0 -> BOX DRAWINGS LIGHT UP AND RIGHT
'\u2534' # 0x00c1 -> BOX DRAWINGS LIGHT UP AND HORIZONTAL
'\u252c' # 0x00c2 -> BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
'\u251c' # 0x00c3 -> BOX DRAWINGS LIGHT VERTICAL AND RIGHT
'\u2500' # 0x00c4 -> BOX DRAWINGS LIGHT HORIZONTAL
'\u253c' # 0x00c5 -> BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
'\u0102' # 0x00c6 -> LATIN CAPITAL LETTER A WITH BREVE
'\u0103' # 0x00c7 -> LATIN SMALL LETTER A WITH BREVE
'\u255a' # 0x00c8 -> BOX DRAWINGS DOUBLE UP AND RIGHT
'\u2554' # 0x00c9 -> BOX DRAWINGS DOUBLE DOWN AND RIGHT
'\u2569' # 0x00ca -> BOX DRAWINGS DOUBLE UP AND HORIZONTAL
'\u2566' # 0x00cb -> BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
'\u2560' # 0x00cc -> BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
'\u2550' # 0x00cd -> BOX DRAWINGS DOUBLE HORIZONTAL
'\u256c' # 0x00ce -> BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
'\xa4' # 0x00cf -> CURRENCY SIGN
'\u0111' # 0x00d0 -> LATIN SMALL LETTER D WITH STROKE
'\u0110' # 0x00d1 -> LATIN CAPITAL LETTER D WITH STROKE
'\u010e' # 0x00d2 -> LATIN CAPITAL LETTER D WITH CARON
'\xcb' # 0x00d3 -> LATIN CAPITAL LETTER E WITH DIAERESIS
'\u010f' # 0x00d4 -> LATIN SMALL LETTER D WITH CARON
'\u0147' # 0x00d5 -> LATIN CAPITAL LETTER N WITH CARON
'\xcd' # 0x00d6 -> LATIN CAPITAL LETTER I WITH ACUTE
'\xce' # 0x00d7 -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
'\u011b' # 0x00d8 -> LATIN SMALL LETTER E WITH CARON
'\u2518' # 0x00d9 -> BOX DRAWINGS LIGHT UP AND LEFT
'\u250c' # 0x00da -> BOX DRAWINGS LIGHT DOWN AND RIGHT
'\u2588' # 0x00db -> FULL BLOCK
'\u2584' # 0x00dc -> LOWER HALF BLOCK
'\u0162' # 0x00dd -> LATIN CAPITAL LETTER T WITH CEDILLA
'\u016e' # 0x00de -> LATIN CAPITAL LETTER U WITH RING ABOVE
'\u2580' # 0x00df -> UPPER HALF BLOCK
'\xd3' # 0x00e0 -> LATIN CAPITAL LETTER O WITH ACUTE
'\xdf' # 0x00e1 -> LATIN SMALL LETTER SHARP S
'\xd4' # 0x00e2 -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
'\u0143' # 0x00e3 -> LATIN CAPITAL LETTER N WITH ACUTE
'\u0144' # 0x00e4 -> LATIN SMALL LETTER N WITH ACUTE
'\u0148' # 0x00e5 -> LATIN SMALL LETTER N WITH CARON
'\u0160' # 0x00e6 -> LATIN CAPITAL LETTER S WITH CARON
'\u0161' # 0x00e7 -> LATIN SMALL LETTER S WITH CARON
'\u0154' # 0x00e8 -> LATIN CAPITAL LETTER R WITH ACUTE
'\xda' # 0x00e9 -> LATIN CAPITAL LETTER U WITH ACUTE
'\u0155' # 0x00ea -> LATIN SMALL LETTER R WITH ACUTE
'\u0170' # 0x00eb -> LATIN CAPITAL LETTER U WITH DOUBLE ACUTE
'\xfd' # 0x00ec -> LATIN SMALL LETTER Y WITH ACUTE
'\xdd' # 0x00ed -> LATIN CAPITAL LETTER Y WITH ACUTE
'\u0163' # 0x00ee -> LATIN SMALL LETTER T WITH CEDILLA
'\xb4' # 0x00ef -> ACUTE ACCENT
'\xad' # 0x00f0 -> SOFT HYPHEN
'\u02dd' # 0x00f1 -> DOUBLE ACUTE ACCENT
'\u02db' # 0x00f2 -> OGONEK
'\u02c7' # 0x00f3 -> CARON
'\u02d8' # 0x00f4 -> BREVE
'\xa7' # 0x00f5 -> SECTION SIGN
'\xf7' # 0x00f6 -> DIVISION SIGN
'\xb8' # 0x00f7 -> CEDILLA
'\xb0' # 0x00f8 -> DEGREE SIGN
'\xa8' # 0x00f9 -> DIAERESIS
'\u02d9' # 0x00fa -> DOT ABOVE
'\u0171' # 0x00fb -> LATIN SMALL LETTER U WITH DOUBLE ACUTE
'\u0158' # 0x00fc -> LATIN CAPITAL LETTER R WITH CARON
'\u0159' # 0x00fd -> LATIN SMALL LETTER R WITH CARON
'\u25a0' # 0x00fe -> BLACK SQUARE
'\xa0' # 0x00ff -> NO-BREAK SPACE
)
# Encoding map: Unicode code point -> cp852 byte value (the inverse of
# decoding_table).  Generated data -- do not edit by hand; regenerate with
# gencodec.py from the vendor mapping file instead.
encoding_map = {
    0x0000: 0x0000, # NULL
    0x0001: 0x0001, # START OF HEADING
    0x0002: 0x0002, # START OF TEXT
    0x0003: 0x0003, # END OF TEXT
    0x0004: 0x0004, # END OF TRANSMISSION
    0x0005: 0x0005, # ENQUIRY
    0x0006: 0x0006, # ACKNOWLEDGE
    0x0007: 0x0007, # BELL
    0x0008: 0x0008, # BACKSPACE
    0x0009: 0x0009, # HORIZONTAL TABULATION
    0x000a: 0x000a, # LINE FEED
    0x000b: 0x000b, # VERTICAL TABULATION
    0x000c: 0x000c, # FORM FEED
    0x000d: 0x000d, # CARRIAGE RETURN
    0x000e: 0x000e, # SHIFT OUT
    0x000f: 0x000f, # SHIFT IN
    0x0010: 0x0010, # DATA LINK ESCAPE
    0x0011: 0x0011, # DEVICE CONTROL ONE
    0x0012: 0x0012, # DEVICE CONTROL TWO
    0x0013: 0x0013, # DEVICE CONTROL THREE
    0x0014: 0x0014, # DEVICE CONTROL FOUR
    0x0015: 0x0015, # NEGATIVE ACKNOWLEDGE
    0x0016: 0x0016, # SYNCHRONOUS IDLE
    0x0017: 0x0017, # END OF TRANSMISSION BLOCK
    0x0018: 0x0018, # CANCEL
    0x0019: 0x0019, # END OF MEDIUM
    0x001a: 0x001a, # SUBSTITUTE
    0x001b: 0x001b, # ESCAPE
    0x001c: 0x001c, # FILE SEPARATOR
    0x001d: 0x001d, # GROUP SEPARATOR
    0x001e: 0x001e, # RECORD SEPARATOR
    0x001f: 0x001f, # UNIT SEPARATOR
    0x0020: 0x0020, # SPACE
    0x0021: 0x0021, # EXCLAMATION MARK
    0x0022: 0x0022, # QUOTATION MARK
    0x0023: 0x0023, # NUMBER SIGN
    0x0024: 0x0024, # DOLLAR SIGN
    0x0025: 0x0025, # PERCENT SIGN
    0x0026: 0x0026, # AMPERSAND
    0x0027: 0x0027, # APOSTROPHE
    0x0028: 0x0028, # LEFT PARENTHESIS
    0x0029: 0x0029, # RIGHT PARENTHESIS
    0x002a: 0x002a, # ASTERISK
    0x002b: 0x002b, # PLUS SIGN
    0x002c: 0x002c, # COMMA
    0x002d: 0x002d, # HYPHEN-MINUS
    0x002e: 0x002e, # FULL STOP
    0x002f: 0x002f, # SOLIDUS
    0x0030: 0x0030, # DIGIT ZERO
    0x0031: 0x0031, # DIGIT ONE
    0x0032: 0x0032, # DIGIT TWO
    0x0033: 0x0033, # DIGIT THREE
    0x0034: 0x0034, # DIGIT FOUR
    0x0035: 0x0035, # DIGIT FIVE
    0x0036: 0x0036, # DIGIT SIX
    0x0037: 0x0037, # DIGIT SEVEN
    0x0038: 0x0038, # DIGIT EIGHT
    0x0039: 0x0039, # DIGIT NINE
    0x003a: 0x003a, # COLON
    0x003b: 0x003b, # SEMICOLON
    0x003c: 0x003c, # LESS-THAN SIGN
    0x003d: 0x003d, # EQUALS SIGN
    0x003e: 0x003e, # GREATER-THAN SIGN
    0x003f: 0x003f, # QUESTION MARK
    0x0040: 0x0040, # COMMERCIAL AT
    0x0041: 0x0041, # LATIN CAPITAL LETTER A
    0x0042: 0x0042, # LATIN CAPITAL LETTER B
    0x0043: 0x0043, # LATIN CAPITAL LETTER C
    0x0044: 0x0044, # LATIN CAPITAL LETTER D
    0x0045: 0x0045, # LATIN CAPITAL LETTER E
    0x0046: 0x0046, # LATIN CAPITAL LETTER F
    0x0047: 0x0047, # LATIN CAPITAL LETTER G
    0x0048: 0x0048, # LATIN CAPITAL LETTER H
    0x0049: 0x0049, # LATIN CAPITAL LETTER I
    0x004a: 0x004a, # LATIN CAPITAL LETTER J
    0x004b: 0x004b, # LATIN CAPITAL LETTER K
    0x004c: 0x004c, # LATIN CAPITAL LETTER L
    0x004d: 0x004d, # LATIN CAPITAL LETTER M
    0x004e: 0x004e, # LATIN CAPITAL LETTER N
    0x004f: 0x004f, # LATIN CAPITAL LETTER O
    0x0050: 0x0050, # LATIN CAPITAL LETTER P
    0x0051: 0x0051, # LATIN CAPITAL LETTER Q
    0x0052: 0x0052, # LATIN CAPITAL LETTER R
    0x0053: 0x0053, # LATIN CAPITAL LETTER S
    0x0054: 0x0054, # LATIN CAPITAL LETTER T
    0x0055: 0x0055, # LATIN CAPITAL LETTER U
    0x0056: 0x0056, # LATIN CAPITAL LETTER V
    0x0057: 0x0057, # LATIN CAPITAL LETTER W
    0x0058: 0x0058, # LATIN CAPITAL LETTER X
    0x0059: 0x0059, # LATIN CAPITAL LETTER Y
    0x005a: 0x005a, # LATIN CAPITAL LETTER Z
    0x005b: 0x005b, # LEFT SQUARE BRACKET
    0x005c: 0x005c, # REVERSE SOLIDUS
    0x005d: 0x005d, # RIGHT SQUARE BRACKET
    0x005e: 0x005e, # CIRCUMFLEX ACCENT
    0x005f: 0x005f, # LOW LINE
    0x0060: 0x0060, # GRAVE ACCENT
    0x0061: 0x0061, # LATIN SMALL LETTER A
    0x0062: 0x0062, # LATIN SMALL LETTER B
    0x0063: 0x0063, # LATIN SMALL LETTER C
    0x0064: 0x0064, # LATIN SMALL LETTER D
    0x0065: 0x0065, # LATIN SMALL LETTER E
    0x0066: 0x0066, # LATIN SMALL LETTER F
    0x0067: 0x0067, # LATIN SMALL LETTER G
    0x0068: 0x0068, # LATIN SMALL LETTER H
    0x0069: 0x0069, # LATIN SMALL LETTER I
    0x006a: 0x006a, # LATIN SMALL LETTER J
    0x006b: 0x006b, # LATIN SMALL LETTER K
    0x006c: 0x006c, # LATIN SMALL LETTER L
    0x006d: 0x006d, # LATIN SMALL LETTER M
    0x006e: 0x006e, # LATIN SMALL LETTER N
    0x006f: 0x006f, # LATIN SMALL LETTER O
    0x0070: 0x0070, # LATIN SMALL LETTER P
    0x0071: 0x0071, # LATIN SMALL LETTER Q
    0x0072: 0x0072, # LATIN SMALL LETTER R
    0x0073: 0x0073, # LATIN SMALL LETTER S
    0x0074: 0x0074, # LATIN SMALL LETTER T
    0x0075: 0x0075, # LATIN SMALL LETTER U
    0x0076: 0x0076, # LATIN SMALL LETTER V
    0x0077: 0x0077, # LATIN SMALL LETTER W
    0x0078: 0x0078, # LATIN SMALL LETTER X
    0x0079: 0x0079, # LATIN SMALL LETTER Y
    0x007a: 0x007a, # LATIN SMALL LETTER Z
    0x007b: 0x007b, # LEFT CURLY BRACKET
    0x007c: 0x007c, # VERTICAL LINE
    0x007d: 0x007d, # RIGHT CURLY BRACKET
    0x007e: 0x007e, # TILDE
    0x007f: 0x007f, # DELETE
    0x00a0: 0x00ff, # NO-BREAK SPACE
    0x00a4: 0x00cf, # CURRENCY SIGN
    0x00a7: 0x00f5, # SECTION SIGN
    0x00a8: 0x00f9, # DIAERESIS
    0x00ab: 0x00ae, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
    0x00ac: 0x00aa, # NOT SIGN
    0x00ad: 0x00f0, # SOFT HYPHEN
    0x00b0: 0x00f8, # DEGREE SIGN
    0x00b4: 0x00ef, # ACUTE ACCENT
    0x00b8: 0x00f7, # CEDILLA
    0x00bb: 0x00af, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
    0x00c1: 0x00b5, # LATIN CAPITAL LETTER A WITH ACUTE
    0x00c2: 0x00b6, # LATIN CAPITAL LETTER A WITH CIRCUMFLEX
    0x00c4: 0x008e, # LATIN CAPITAL LETTER A WITH DIAERESIS
    0x00c7: 0x0080, # LATIN CAPITAL LETTER C WITH CEDILLA
    0x00c9: 0x0090, # LATIN CAPITAL LETTER E WITH ACUTE
    0x00cb: 0x00d3, # LATIN CAPITAL LETTER E WITH DIAERESIS
    0x00cd: 0x00d6, # LATIN CAPITAL LETTER I WITH ACUTE
    0x00ce: 0x00d7, # LATIN CAPITAL LETTER I WITH CIRCUMFLEX
    0x00d3: 0x00e0, # LATIN CAPITAL LETTER O WITH ACUTE
    0x00d4: 0x00e2, # LATIN CAPITAL LETTER O WITH CIRCUMFLEX
    0x00d6: 0x0099, # LATIN CAPITAL LETTER O WITH DIAERESIS
    0x00d7: 0x009e, # MULTIPLICATION SIGN
    0x00da: 0x00e9, # LATIN CAPITAL LETTER U WITH ACUTE
    0x00dc: 0x009a, # LATIN CAPITAL LETTER U WITH DIAERESIS
    0x00dd: 0x00ed, # LATIN CAPITAL LETTER Y WITH ACUTE
    0x00df: 0x00e1, # LATIN SMALL LETTER SHARP S
    0x00e1: 0x00a0, # LATIN SMALL LETTER A WITH ACUTE
    0x00e2: 0x0083, # LATIN SMALL LETTER A WITH CIRCUMFLEX
    0x00e4: 0x0084, # LATIN SMALL LETTER A WITH DIAERESIS
    0x00e7: 0x0087, # LATIN SMALL LETTER C WITH CEDILLA
    0x00e9: 0x0082, # LATIN SMALL LETTER E WITH ACUTE
    0x00eb: 0x0089, # LATIN SMALL LETTER E WITH DIAERESIS
    0x00ed: 0x00a1, # LATIN SMALL LETTER I WITH ACUTE
    0x00ee: 0x008c, # LATIN SMALL LETTER I WITH CIRCUMFLEX
    0x00f3: 0x00a2, # LATIN SMALL LETTER O WITH ACUTE
    0x00f4: 0x0093, # LATIN SMALL LETTER O WITH CIRCUMFLEX
    0x00f6: 0x0094, # LATIN SMALL LETTER O WITH DIAERESIS
    0x00f7: 0x00f6, # DIVISION SIGN
    0x00fa: 0x00a3, # LATIN SMALL LETTER U WITH ACUTE
    0x00fc: 0x0081, # LATIN SMALL LETTER U WITH DIAERESIS
    0x00fd: 0x00ec, # LATIN SMALL LETTER Y WITH ACUTE
    0x0102: 0x00c6, # LATIN CAPITAL LETTER A WITH BREVE
    0x0103: 0x00c7, # LATIN SMALL LETTER A WITH BREVE
    0x0104: 0x00a4, # LATIN CAPITAL LETTER A WITH OGONEK
    0x0105: 0x00a5, # LATIN SMALL LETTER A WITH OGONEK
    0x0106: 0x008f, # LATIN CAPITAL LETTER C WITH ACUTE
    0x0107: 0x0086, # LATIN SMALL LETTER C WITH ACUTE
    0x010c: 0x00ac, # LATIN CAPITAL LETTER C WITH CARON
    0x010d: 0x009f, # LATIN SMALL LETTER C WITH CARON
    0x010e: 0x00d2, # LATIN CAPITAL LETTER D WITH CARON
    0x010f: 0x00d4, # LATIN SMALL LETTER D WITH CARON
    0x0110: 0x00d1, # LATIN CAPITAL LETTER D WITH STROKE
    0x0111: 0x00d0, # LATIN SMALL LETTER D WITH STROKE
    0x0118: 0x00a8, # LATIN CAPITAL LETTER E WITH OGONEK
    0x0119: 0x00a9, # LATIN SMALL LETTER E WITH OGONEK
    0x011a: 0x00b7, # LATIN CAPITAL LETTER E WITH CARON
    0x011b: 0x00d8, # LATIN SMALL LETTER E WITH CARON
    0x0139: 0x0091, # LATIN CAPITAL LETTER L WITH ACUTE
    0x013a: 0x0092, # LATIN SMALL LETTER L WITH ACUTE
    0x013d: 0x0095, # LATIN CAPITAL LETTER L WITH CARON
    0x013e: 0x0096, # LATIN SMALL LETTER L WITH CARON
    0x0141: 0x009d, # LATIN CAPITAL LETTER L WITH STROKE
    0x0142: 0x0088, # LATIN SMALL LETTER L WITH STROKE
    0x0143: 0x00e3, # LATIN CAPITAL LETTER N WITH ACUTE
    0x0144: 0x00e4, # LATIN SMALL LETTER N WITH ACUTE
    0x0147: 0x00d5, # LATIN CAPITAL LETTER N WITH CARON
    0x0148: 0x00e5, # LATIN SMALL LETTER N WITH CARON
    0x0150: 0x008a, # LATIN CAPITAL LETTER O WITH DOUBLE ACUTE
    0x0151: 0x008b, # LATIN SMALL LETTER O WITH DOUBLE ACUTE
    0x0154: 0x00e8, # LATIN CAPITAL LETTER R WITH ACUTE
    0x0155: 0x00ea, # LATIN SMALL LETTER R WITH ACUTE
    0x0158: 0x00fc, # LATIN CAPITAL LETTER R WITH CARON
    0x0159: 0x00fd, # LATIN SMALL LETTER R WITH CARON
    0x015a: 0x0097, # LATIN CAPITAL LETTER S WITH ACUTE
    0x015b: 0x0098, # LATIN SMALL LETTER S WITH ACUTE
    0x015e: 0x00b8, # LATIN CAPITAL LETTER S WITH CEDILLA
    0x015f: 0x00ad, # LATIN SMALL LETTER S WITH CEDILLA
    0x0160: 0x00e6, # LATIN CAPITAL LETTER S WITH CARON
    0x0161: 0x00e7, # LATIN SMALL LETTER S WITH CARON
    0x0162: 0x00dd, # LATIN CAPITAL LETTER T WITH CEDILLA
    0x0163: 0x00ee, # LATIN SMALL LETTER T WITH CEDILLA
    0x0164: 0x009b, # LATIN CAPITAL LETTER T WITH CARON
    0x0165: 0x009c, # LATIN SMALL LETTER T WITH CARON
    0x016e: 0x00de, # LATIN CAPITAL LETTER U WITH RING ABOVE
    0x016f: 0x0085, # LATIN SMALL LETTER U WITH RING ABOVE
    0x0170: 0x00eb, # LATIN CAPITAL LETTER U WITH DOUBLE ACUTE
    0x0171: 0x00fb, # LATIN SMALL LETTER U WITH DOUBLE ACUTE
    0x0179: 0x008d, # LATIN CAPITAL LETTER Z WITH ACUTE
    0x017a: 0x00ab, # LATIN SMALL LETTER Z WITH ACUTE
    0x017b: 0x00bd, # LATIN CAPITAL LETTER Z WITH DOT ABOVE
    0x017c: 0x00be, # LATIN SMALL LETTER Z WITH DOT ABOVE
    0x017d: 0x00a6, # LATIN CAPITAL LETTER Z WITH CARON
    0x017e: 0x00a7, # LATIN SMALL LETTER Z WITH CARON
    0x02c7: 0x00f3, # CARON
    0x02d8: 0x00f4, # BREVE
    0x02d9: 0x00fa, # DOT ABOVE
    0x02db: 0x00f2, # OGONEK
    0x02dd: 0x00f1, # DOUBLE ACUTE ACCENT
    0x2500: 0x00c4, # BOX DRAWINGS LIGHT HORIZONTAL
    0x2502: 0x00b3, # BOX DRAWINGS LIGHT VERTICAL
    0x250c: 0x00da, # BOX DRAWINGS LIGHT DOWN AND RIGHT
    0x2510: 0x00bf, # BOX DRAWINGS LIGHT DOWN AND LEFT
    0x2514: 0x00c0, # BOX DRAWINGS LIGHT UP AND RIGHT
    0x2518: 0x00d9, # BOX DRAWINGS LIGHT UP AND LEFT
    0x251c: 0x00c3, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
    0x2524: 0x00b4, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
    0x252c: 0x00c2, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
    0x2534: 0x00c1, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
    0x253c: 0x00c5, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
    0x2550: 0x00cd, # BOX DRAWINGS DOUBLE HORIZONTAL
    0x2551: 0x00ba, # BOX DRAWINGS DOUBLE VERTICAL
    0x2554: 0x00c9, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
    0x2557: 0x00bb, # BOX DRAWINGS DOUBLE DOWN AND LEFT
    0x255a: 0x00c8, # BOX DRAWINGS DOUBLE UP AND RIGHT
    0x255d: 0x00bc, # BOX DRAWINGS DOUBLE UP AND LEFT
    0x2560: 0x00cc, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
    0x2563: 0x00b9, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
    0x2566: 0x00cb, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
    0x2569: 0x00ca, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
    0x256c: 0x00ce, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
    0x2580: 0x00df, # UPPER HALF BLOCK
    0x2584: 0x00dc, # LOWER HALF BLOCK
    0x2588: 0x00db, # FULL BLOCK
    0x2591: 0x00b0, # LIGHT SHADE
    0x2592: 0x00b1, # MEDIUM SHADE
    0x2593: 0x00b2, # DARK SHADE
    0x25a0: 0x00fe, # BLACK SQUARE
}
# NOTE(review): removed a stray "=======" merge-conflict marker that made the
# module unparseable.  The content above and below appears to be two
# concatenated copies of this generated codec -- deduplicate when convenient.
""" Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP852.TXT' with gencodec.py.
"""#"
import codecs
class Codec(codecs.Codec):
    """Stateless cp852 codec built on the C-level charmap helpers."""

    def encode(self, input, errors='strict'):
        """Encode *input* using encoding_map; returns (bytes, chars consumed)."""
        return codecs.charmap_encode(input, errors, encoding_map)

    def decode(self, input, errors='strict'):
        """Decode *input* using decoding_table; returns (str, bytes consumed)."""
        return codecs.charmap_decode(input, errors, decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
    """Incremental encoder; charmap encoding keeps no state between calls."""

    def encode(self, input, final=False):
        """Encode *input*, returning only the encoded bytes."""
        encoded, _consumed = codecs.charmap_encode(input, self.errors, encoding_map)
        return encoded
class IncrementalDecoder(codecs.IncrementalDecoder):
    """Incremental decoder; every cp852 byte maps to one character, so no
    state is carried between calls."""

    def decode(self, input, final=False):
        """Decode *input*, returning only the decoded string."""
        decoded, _consumed = codecs.charmap_decode(input, self.errors, decoding_table)
        return decoded
class StreamWriter(Codec,codecs.StreamWriter):
    """Stream writer: combines Codec's encode with codecs.StreamWriter."""
    pass
class StreamReader(Codec,codecs.StreamReader):
    """Stream reader: combines Codec's decode with codecs.StreamReader."""
    pass
def getregentry():
    """Return the CodecInfo record the codecs registry uses for 'cp852'."""
    # Codec is stateless, so a single instance can serve both directions.
    codec = Codec()
    return codecs.CodecInfo(
        name='cp852',
        encode=codec.encode,
        decode=codec.decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
# Legacy decoding map (cp852 byte -> Unicode code point).  The Codec classes
# above decode via decoding_table, not this dict; presumably it is retained
# for compatibility with older consumers -- TODO confirm before removing.
# Generated data -- do not edit by hand; regenerate with gencodec.py.
decoding_map = codecs.make_identity_dict(range(256))
decoding_map.update({
    0x0080: 0x00c7, # LATIN CAPITAL LETTER C WITH CEDILLA
    0x0081: 0x00fc, # LATIN SMALL LETTER U WITH DIAERESIS
    0x0082: 0x00e9, # LATIN SMALL LETTER E WITH ACUTE
    0x0083: 0x00e2, # LATIN SMALL LETTER A WITH CIRCUMFLEX
    0x0084: 0x00e4, # LATIN SMALL LETTER A WITH DIAERESIS
    0x0085: 0x016f, # LATIN SMALL LETTER U WITH RING ABOVE
    0x0086: 0x0107, # LATIN SMALL LETTER C WITH ACUTE
    0x0087: 0x00e7, # LATIN SMALL LETTER C WITH CEDILLA
    0x0088: 0x0142, # LATIN SMALL LETTER L WITH STROKE
    0x0089: 0x00eb, # LATIN SMALL LETTER E WITH DIAERESIS
    0x008a: 0x0150, # LATIN CAPITAL LETTER O WITH DOUBLE ACUTE
    0x008b: 0x0151, # LATIN SMALL LETTER O WITH DOUBLE ACUTE
    0x008c: 0x00ee, # LATIN SMALL LETTER I WITH CIRCUMFLEX
    0x008d: 0x0179, # LATIN CAPITAL LETTER Z WITH ACUTE
    0x008e: 0x00c4, # LATIN CAPITAL LETTER A WITH DIAERESIS
    0x008f: 0x0106, # LATIN CAPITAL LETTER C WITH ACUTE
    0x0090: 0x00c9, # LATIN CAPITAL LETTER E WITH ACUTE
    0x0091: 0x0139, # LATIN CAPITAL LETTER L WITH ACUTE
    0x0092: 0x013a, # LATIN SMALL LETTER L WITH ACUTE
    0x0093: 0x00f4, # LATIN SMALL LETTER O WITH CIRCUMFLEX
    0x0094: 0x00f6, # LATIN SMALL LETTER O WITH DIAERESIS
    0x0095: 0x013d, # LATIN CAPITAL LETTER L WITH CARON
    0x0096: 0x013e, # LATIN SMALL LETTER L WITH CARON
    0x0097: 0x015a, # LATIN CAPITAL LETTER S WITH ACUTE
    0x0098: 0x015b, # LATIN SMALL LETTER S WITH ACUTE
    0x0099: 0x00d6, # LATIN CAPITAL LETTER O WITH DIAERESIS
    0x009a: 0x00dc, # LATIN CAPITAL LETTER U WITH DIAERESIS
    0x009b: 0x0164, # LATIN CAPITAL LETTER T WITH CARON
    0x009c: 0x0165, # LATIN SMALL LETTER T WITH CARON
    0x009d: 0x0141, # LATIN CAPITAL LETTER L WITH STROKE
    0x009e: 0x00d7, # MULTIPLICATION SIGN
    0x009f: 0x010d, # LATIN SMALL LETTER C WITH CARON
    0x00a0: 0x00e1, # LATIN SMALL LETTER A WITH ACUTE
    0x00a1: 0x00ed, # LATIN SMALL LETTER I WITH ACUTE
    0x00a2: 0x00f3, # LATIN SMALL LETTER O WITH ACUTE
    0x00a3: 0x00fa, # LATIN SMALL LETTER U WITH ACUTE
    0x00a4: 0x0104, # LATIN CAPITAL LETTER A WITH OGONEK
    0x00a5: 0x0105, # LATIN SMALL LETTER A WITH OGONEK
    0x00a6: 0x017d, # LATIN CAPITAL LETTER Z WITH CARON
    0x00a7: 0x017e, # LATIN SMALL LETTER Z WITH CARON
    0x00a8: 0x0118, # LATIN CAPITAL LETTER E WITH OGONEK
    0x00a9: 0x0119, # LATIN SMALL LETTER E WITH OGONEK
    0x00aa: 0x00ac, # NOT SIGN
    0x00ab: 0x017a, # LATIN SMALL LETTER Z WITH ACUTE
    0x00ac: 0x010c, # LATIN CAPITAL LETTER C WITH CARON
    0x00ad: 0x015f, # LATIN SMALL LETTER S WITH CEDILLA
    0x00ae: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
    0x00af: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
    0x00b0: 0x2591, # LIGHT SHADE
    0x00b1: 0x2592, # MEDIUM SHADE
    0x00b2: 0x2593, # DARK SHADE
    0x00b3: 0x2502, # BOX DRAWINGS LIGHT VERTICAL
    0x00b4: 0x2524, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
    0x00b5: 0x00c1, # LATIN CAPITAL LETTER A WITH ACUTE
    0x00b6: 0x00c2, # LATIN CAPITAL LETTER A WITH CIRCUMFLEX
    0x00b7: 0x011a, # LATIN CAPITAL LETTER E WITH CARON
    0x00b8: 0x015e, # LATIN CAPITAL LETTER S WITH CEDILLA
    0x00b9: 0x2563, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
    0x00ba: 0x2551, # BOX DRAWINGS DOUBLE VERTICAL
    0x00bb: 0x2557, # BOX DRAWINGS DOUBLE DOWN AND LEFT
    0x00bc: 0x255d, # BOX DRAWINGS DOUBLE UP AND LEFT
    0x00bd: 0x017b, # LATIN CAPITAL LETTER Z WITH DOT ABOVE
    0x00be: 0x017c, # LATIN SMALL LETTER Z WITH DOT ABOVE
    0x00bf: 0x2510, # BOX DRAWINGS LIGHT DOWN AND LEFT
    0x00c0: 0x2514, # BOX DRAWINGS LIGHT UP AND RIGHT
    0x00c1: 0x2534, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
    0x00c2: 0x252c, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
    0x00c3: 0x251c, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
    0x00c4: 0x2500, # BOX DRAWINGS LIGHT HORIZONTAL
    0x00c5: 0x253c, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
    0x00c6: 0x0102, # LATIN CAPITAL LETTER A WITH BREVE
    0x00c7: 0x0103, # LATIN SMALL LETTER A WITH BREVE
    0x00c8: 0x255a, # BOX DRAWINGS DOUBLE UP AND RIGHT
    0x00c9: 0x2554, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
    0x00ca: 0x2569, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
    0x00cb: 0x2566, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
    0x00cc: 0x2560, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
    0x00cd: 0x2550, # BOX DRAWINGS DOUBLE HORIZONTAL
    0x00ce: 0x256c, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
    0x00cf: 0x00a4, # CURRENCY SIGN
    0x00d0: 0x0111, # LATIN SMALL LETTER D WITH STROKE
    0x00d1: 0x0110, # LATIN CAPITAL LETTER D WITH STROKE
    0x00d2: 0x010e, # LATIN CAPITAL LETTER D WITH CARON
    0x00d3: 0x00cb, # LATIN CAPITAL LETTER E WITH DIAERESIS
    0x00d4: 0x010f, # LATIN SMALL LETTER D WITH CARON
    0x00d5: 0x0147, # LATIN CAPITAL LETTER N WITH CARON
    0x00d6: 0x00cd, # LATIN CAPITAL LETTER I WITH ACUTE
    0x00d7: 0x00ce, # LATIN CAPITAL LETTER I WITH CIRCUMFLEX
    0x00d8: 0x011b, # LATIN SMALL LETTER E WITH CARON
    0x00d9: 0x2518, # BOX DRAWINGS LIGHT UP AND LEFT
    0x00da: 0x250c, # BOX DRAWINGS LIGHT DOWN AND RIGHT
    0x00db: 0x2588, # FULL BLOCK
    0x00dc: 0x2584, # LOWER HALF BLOCK
    0x00dd: 0x0162, # LATIN CAPITAL LETTER T WITH CEDILLA
    0x00de: 0x016e, # LATIN CAPITAL LETTER U WITH RING ABOVE
    0x00df: 0x2580, # UPPER HALF BLOCK
    0x00e0: 0x00d3, # LATIN CAPITAL LETTER O WITH ACUTE
    0x00e1: 0x00df, # LATIN SMALL LETTER SHARP S
    0x00e2: 0x00d4, # LATIN CAPITAL LETTER O WITH CIRCUMFLEX
    0x00e3: 0x0143, # LATIN CAPITAL LETTER N WITH ACUTE
    0x00e4: 0x0144, # LATIN SMALL LETTER N WITH ACUTE
    0x00e5: 0x0148, # LATIN SMALL LETTER N WITH CARON
    0x00e6: 0x0160, # LATIN CAPITAL LETTER S WITH CARON
    0x00e7: 0x0161, # LATIN SMALL LETTER S WITH CARON
    0x00e8: 0x0154, # LATIN CAPITAL LETTER R WITH ACUTE
    0x00e9: 0x00da, # LATIN CAPITAL LETTER U WITH ACUTE
    0x00ea: 0x0155, # LATIN SMALL LETTER R WITH ACUTE
    0x00eb: 0x0170, # LATIN CAPITAL LETTER U WITH DOUBLE ACUTE
    0x00ec: 0x00fd, # LATIN SMALL LETTER Y WITH ACUTE
    0x00ed: 0x00dd, # LATIN CAPITAL LETTER Y WITH ACUTE
    0x00ee: 0x0163, # LATIN SMALL LETTER T WITH CEDILLA
    0x00ef: 0x00b4, # ACUTE ACCENT
    0x00f0: 0x00ad, # SOFT HYPHEN
    0x00f1: 0x02dd, # DOUBLE ACUTE ACCENT
    0x00f2: 0x02db, # OGONEK
    0x00f3: 0x02c7, # CARON
    0x00f4: 0x02d8, # BREVE
    0x00f5: 0x00a7, # SECTION SIGN
    0x00f6: 0x00f7, # DIVISION SIGN
    0x00f7: 0x00b8, # CEDILLA
    0x00f8: 0x00b0, # DEGREE SIGN
    0x00f9: 0x00a8, # DIAERESIS
    0x00fa: 0x02d9, # DOT ABOVE
    0x00fb: 0x0171, # LATIN SMALL LETTER U WITH DOUBLE ACUTE
    0x00fc: 0x0158, # LATIN CAPITAL LETTER R WITH CARON
    0x00fd: 0x0159, # LATIN SMALL LETTER R WITH CARON
    0x00fe: 0x25a0, # BLACK SQUARE
    0x00ff: 0x00a0, # NO-BREAK SPACE
})
# Decoding table: index = cp852 byte value, entry = the Unicode character it
# decodes to; consumed by codecs.charmap_decode above.  Generated data -- do
# not edit by hand; regenerate with gencodec.py from the vendor mapping file.
decoding_table = (
    '\x00' # 0x0000 -> NULL
    '\x01' # 0x0001 -> START OF HEADING
    '\x02' # 0x0002 -> START OF TEXT
    '\x03' # 0x0003 -> END OF TEXT
    '\x04' # 0x0004 -> END OF TRANSMISSION
    '\x05' # 0x0005 -> ENQUIRY
    '\x06' # 0x0006 -> ACKNOWLEDGE
    '\x07' # 0x0007 -> BELL
    '\x08' # 0x0008 -> BACKSPACE
    '\t' # 0x0009 -> HORIZONTAL TABULATION
    '\n' # 0x000a -> LINE FEED
    '\x0b' # 0x000b -> VERTICAL TABULATION
    '\x0c' # 0x000c -> FORM FEED
    '\r' # 0x000d -> CARRIAGE RETURN
    '\x0e' # 0x000e -> SHIFT OUT
    '\x0f' # 0x000f -> SHIFT IN
    '\x10' # 0x0010 -> DATA LINK ESCAPE
    '\x11' # 0x0011 -> DEVICE CONTROL ONE
    '\x12' # 0x0012 -> DEVICE CONTROL TWO
    '\x13' # 0x0013 -> DEVICE CONTROL THREE
    '\x14' # 0x0014 -> DEVICE CONTROL FOUR
    '\x15' # 0x0015 -> NEGATIVE ACKNOWLEDGE
    '\x16' # 0x0016 -> SYNCHRONOUS IDLE
    '\x17' # 0x0017 -> END OF TRANSMISSION BLOCK
    '\x18' # 0x0018 -> CANCEL
    '\x19' # 0x0019 -> END OF MEDIUM
    '\x1a' # 0x001a -> SUBSTITUTE
    '\x1b' # 0x001b -> ESCAPE
    '\x1c' # 0x001c -> FILE SEPARATOR
    '\x1d' # 0x001d -> GROUP SEPARATOR
    '\x1e' # 0x001e -> RECORD SEPARATOR
    '\x1f' # 0x001f -> UNIT SEPARATOR
    ' ' # 0x0020 -> SPACE
    '!' # 0x0021 -> EXCLAMATION MARK
    '"' # 0x0022 -> QUOTATION MARK
    '#' # 0x0023 -> NUMBER SIGN
    '$' # 0x0024 -> DOLLAR SIGN
    '%' # 0x0025 -> PERCENT SIGN
    '&' # 0x0026 -> AMPERSAND
    "'" # 0x0027 -> APOSTROPHE
    '(' # 0x0028 -> LEFT PARENTHESIS
    ')' # 0x0029 -> RIGHT PARENTHESIS
    '*' # 0x002a -> ASTERISK
    '+' # 0x002b -> PLUS SIGN
    ',' # 0x002c -> COMMA
    '-' # 0x002d -> HYPHEN-MINUS
    '.' # 0x002e -> FULL STOP
    '/' # 0x002f -> SOLIDUS
    '0' # 0x0030 -> DIGIT ZERO
    '1' # 0x0031 -> DIGIT ONE
    '2' # 0x0032 -> DIGIT TWO
    '3' # 0x0033 -> DIGIT THREE
    '4' # 0x0034 -> DIGIT FOUR
    '5' # 0x0035 -> DIGIT FIVE
    '6' # 0x0036 -> DIGIT SIX
    '7' # 0x0037 -> DIGIT SEVEN
    '8' # 0x0038 -> DIGIT EIGHT
    '9' # 0x0039 -> DIGIT NINE
    ':' # 0x003a -> COLON
    ';' # 0x003b -> SEMICOLON
    '<' # 0x003c -> LESS-THAN SIGN
    '=' # 0x003d -> EQUALS SIGN
    '>' # 0x003e -> GREATER-THAN SIGN
    '?' # 0x003f -> QUESTION MARK
    '@' # 0x0040 -> COMMERCIAL AT
    'A' # 0x0041 -> LATIN CAPITAL LETTER A
    'B' # 0x0042 -> LATIN CAPITAL LETTER B
    'C' # 0x0043 -> LATIN CAPITAL LETTER C
    'D' # 0x0044 -> LATIN CAPITAL LETTER D
    'E' # 0x0045 -> LATIN CAPITAL LETTER E
    'F' # 0x0046 -> LATIN CAPITAL LETTER F
    'G' # 0x0047 -> LATIN CAPITAL LETTER G
    'H' # 0x0048 -> LATIN CAPITAL LETTER H
    'I' # 0x0049 -> LATIN CAPITAL LETTER I
    'J' # 0x004a -> LATIN CAPITAL LETTER J
    'K' # 0x004b -> LATIN CAPITAL LETTER K
    'L' # 0x004c -> LATIN CAPITAL LETTER L
    'M' # 0x004d -> LATIN CAPITAL LETTER M
    'N' # 0x004e -> LATIN CAPITAL LETTER N
    'O' # 0x004f -> LATIN CAPITAL LETTER O
    'P' # 0x0050 -> LATIN CAPITAL LETTER P
    'Q' # 0x0051 -> LATIN CAPITAL LETTER Q
    'R' # 0x0052 -> LATIN CAPITAL LETTER R
    'S' # 0x0053 -> LATIN CAPITAL LETTER S
    'T' # 0x0054 -> LATIN CAPITAL LETTER T
    'U' # 0x0055 -> LATIN CAPITAL LETTER U
    'V' # 0x0056 -> LATIN CAPITAL LETTER V
    'W' # 0x0057 -> LATIN CAPITAL LETTER W
    'X' # 0x0058 -> LATIN CAPITAL LETTER X
    'Y' # 0x0059 -> LATIN CAPITAL LETTER Y
    'Z' # 0x005a -> LATIN CAPITAL LETTER Z
    '[' # 0x005b -> LEFT SQUARE BRACKET
    '\\' # 0x005c -> REVERSE SOLIDUS
    ']' # 0x005d -> RIGHT SQUARE BRACKET
    '^' # 0x005e -> CIRCUMFLEX ACCENT
    '_' # 0x005f -> LOW LINE
    '`' # 0x0060 -> GRAVE ACCENT
    'a' # 0x0061 -> LATIN SMALL LETTER A
    'b' # 0x0062 -> LATIN SMALL LETTER B
    'c' # 0x0063 -> LATIN SMALL LETTER C
    'd' # 0x0064 -> LATIN SMALL LETTER D
    'e' # 0x0065 -> LATIN SMALL LETTER E
    'f' # 0x0066 -> LATIN SMALL LETTER F
    'g' # 0x0067 -> LATIN SMALL LETTER G
    'h' # 0x0068 -> LATIN SMALL LETTER H
    'i' # 0x0069 -> LATIN SMALL LETTER I
    'j' # 0x006a -> LATIN SMALL LETTER J
    'k' # 0x006b -> LATIN SMALL LETTER K
    'l' # 0x006c -> LATIN SMALL LETTER L
    'm' # 0x006d -> LATIN SMALL LETTER M
    'n' # 0x006e -> LATIN SMALL LETTER N
    'o' # 0x006f -> LATIN SMALL LETTER O
    'p' # 0x0070 -> LATIN SMALL LETTER P
    'q' # 0x0071 -> LATIN SMALL LETTER Q
    'r' # 0x0072 -> LATIN SMALL LETTER R
    's' # 0x0073 -> LATIN SMALL LETTER S
    't' # 0x0074 -> LATIN SMALL LETTER T
    'u' # 0x0075 -> LATIN SMALL LETTER U
    'v' # 0x0076 -> LATIN SMALL LETTER V
    'w' # 0x0077 -> LATIN SMALL LETTER W
    'x' # 0x0078 -> LATIN SMALL LETTER X
    'y' # 0x0079 -> LATIN SMALL LETTER Y
    'z' # 0x007a -> LATIN SMALL LETTER Z
    '{' # 0x007b -> LEFT CURLY BRACKET
    '|' # 0x007c -> VERTICAL LINE
    '}' # 0x007d -> RIGHT CURLY BRACKET
    '~' # 0x007e -> TILDE
    '\x7f' # 0x007f -> DELETE
    '\xc7' # 0x0080 -> LATIN CAPITAL LETTER C WITH CEDILLA
    '\xfc' # 0x0081 -> LATIN SMALL LETTER U WITH DIAERESIS
    '\xe9' # 0x0082 -> LATIN SMALL LETTER E WITH ACUTE
    '\xe2' # 0x0083 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
    '\xe4' # 0x0084 -> LATIN SMALL LETTER A WITH DIAERESIS
    '\u016f' # 0x0085 -> LATIN SMALL LETTER U WITH RING ABOVE
    '\u0107' # 0x0086 -> LATIN SMALL LETTER C WITH ACUTE
    '\xe7' # 0x0087 -> LATIN SMALL LETTER C WITH CEDILLA
    '\u0142' # 0x0088 -> LATIN SMALL LETTER L WITH STROKE
    '\xeb' # 0x0089 -> LATIN SMALL LETTER E WITH DIAERESIS
    '\u0150' # 0x008a -> LATIN CAPITAL LETTER O WITH DOUBLE ACUTE
    '\u0151' # 0x008b -> LATIN SMALL LETTER O WITH DOUBLE ACUTE
    '\xee' # 0x008c -> LATIN SMALL LETTER I WITH CIRCUMFLEX
    '\u0179' # 0x008d -> LATIN CAPITAL LETTER Z WITH ACUTE
    '\xc4' # 0x008e -> LATIN CAPITAL LETTER A WITH DIAERESIS
    '\u0106' # 0x008f -> LATIN CAPITAL LETTER C WITH ACUTE
    '\xc9' # 0x0090 -> LATIN CAPITAL LETTER E WITH ACUTE
    '\u0139' # 0x0091 -> LATIN CAPITAL LETTER L WITH ACUTE
    '\u013a' # 0x0092 -> LATIN SMALL LETTER L WITH ACUTE
    '\xf4' # 0x0093 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
    '\xf6' # 0x0094 -> LATIN SMALL LETTER O WITH DIAERESIS
    '\u013d' # 0x0095 -> LATIN CAPITAL LETTER L WITH CARON
    '\u013e' # 0x0096 -> LATIN SMALL LETTER L WITH CARON
    '\u015a' # 0x0097 -> LATIN CAPITAL LETTER S WITH ACUTE
    '\u015b' # 0x0098 -> LATIN SMALL LETTER S WITH ACUTE
    '\xd6' # 0x0099 -> LATIN CAPITAL LETTER O WITH DIAERESIS
    '\xdc' # 0x009a -> LATIN CAPITAL LETTER U WITH DIAERESIS
    '\u0164' # 0x009b -> LATIN CAPITAL LETTER T WITH CARON
    '\u0165' # 0x009c -> LATIN SMALL LETTER T WITH CARON
    '\u0141' # 0x009d -> LATIN CAPITAL LETTER L WITH STROKE
    '\xd7' # 0x009e -> MULTIPLICATION SIGN
    '\u010d' # 0x009f -> LATIN SMALL LETTER C WITH CARON
    '\xe1' # 0x00a0 -> LATIN SMALL LETTER A WITH ACUTE
    '\xed' # 0x00a1 -> LATIN SMALL LETTER I WITH ACUTE
    '\xf3' # 0x00a2 -> LATIN SMALL LETTER O WITH ACUTE
    '\xfa' # 0x00a3 -> LATIN SMALL LETTER U WITH ACUTE
    '\u0104' # 0x00a4 -> LATIN CAPITAL LETTER A WITH OGONEK
    '\u0105' # 0x00a5 -> LATIN SMALL LETTER A WITH OGONEK
    '\u017d' # 0x00a6 -> LATIN CAPITAL LETTER Z WITH CARON
    '\u017e' # 0x00a7 -> LATIN SMALL LETTER Z WITH CARON
    '\u0118' # 0x00a8 -> LATIN CAPITAL LETTER E WITH OGONEK
    '\u0119' # 0x00a9 -> LATIN SMALL LETTER E WITH OGONEK
    '\xac' # 0x00aa -> NOT SIGN
    '\u017a' # 0x00ab -> LATIN SMALL LETTER Z WITH ACUTE
    '\u010c' # 0x00ac -> LATIN CAPITAL LETTER C WITH CARON
    '\u015f' # 0x00ad -> LATIN SMALL LETTER S WITH CEDILLA
    '\xab' # 0x00ae -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
    '\xbb' # 0x00af -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
    '\u2591' # 0x00b0 -> LIGHT SHADE
    '\u2592' # 0x00b1 -> MEDIUM SHADE
    '\u2593' # 0x00b2 -> DARK SHADE
    '\u2502' # 0x00b3 -> BOX DRAWINGS LIGHT VERTICAL
    '\u2524' # 0x00b4 -> BOX DRAWINGS LIGHT VERTICAL AND LEFT
    '\xc1' # 0x00b5 -> LATIN CAPITAL LETTER A WITH ACUTE
    '\xc2' # 0x00b6 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
    '\u011a' # 0x00b7 -> LATIN CAPITAL LETTER E WITH CARON
    '\u015e' # 0x00b8 -> LATIN CAPITAL LETTER S WITH CEDILLA
    '\u2563' # 0x00b9 -> BOX DRAWINGS DOUBLE VERTICAL AND LEFT
    '\u2551' # 0x00ba -> BOX DRAWINGS DOUBLE VERTICAL
    '\u2557' # 0x00bb -> BOX DRAWINGS DOUBLE DOWN AND LEFT
    '\u255d' # 0x00bc -> BOX DRAWINGS DOUBLE UP AND LEFT
    '\u017b' # 0x00bd -> LATIN CAPITAL LETTER Z WITH DOT ABOVE
    '\u017c' # 0x00be -> LATIN SMALL LETTER Z WITH DOT ABOVE
    '\u2510' # 0x00bf -> BOX DRAWINGS LIGHT DOWN AND LEFT
    '\u2514' # 0x00c0 -> BOX DRAWINGS LIGHT UP AND RIGHT
    '\u2534' # 0x00c1 -> BOX DRAWINGS LIGHT UP AND HORIZONTAL
    '\u252c' # 0x00c2 -> BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
    '\u251c' # 0x00c3 -> BOX DRAWINGS LIGHT VERTICAL AND RIGHT
    '\u2500' # 0x00c4 -> BOX DRAWINGS LIGHT HORIZONTAL
    '\u253c' # 0x00c5 -> BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
    '\u0102' # 0x00c6 -> LATIN CAPITAL LETTER A WITH BREVE
    '\u0103' # 0x00c7 -> LATIN SMALL LETTER A WITH BREVE
    '\u255a' # 0x00c8 -> BOX DRAWINGS DOUBLE UP AND RIGHT
    '\u2554' # 0x00c9 -> BOX DRAWINGS DOUBLE DOWN AND RIGHT
    '\u2569' # 0x00ca -> BOX DRAWINGS DOUBLE UP AND HORIZONTAL
    '\u2566' # 0x00cb -> BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
    '\u2560' # 0x00cc -> BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
    '\u2550' # 0x00cd -> BOX DRAWINGS DOUBLE HORIZONTAL
    '\u256c' # 0x00ce -> BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
    '\xa4' # 0x00cf -> CURRENCY SIGN
    '\u0111' # 0x00d0 -> LATIN SMALL LETTER D WITH STROKE
    '\u0110' # 0x00d1 -> LATIN CAPITAL LETTER D WITH STROKE
    '\u010e' # 0x00d2 -> LATIN CAPITAL LETTER D WITH CARON
    '\xcb' # 0x00d3 -> LATIN CAPITAL LETTER E WITH DIAERESIS
    '\u010f' # 0x00d4 -> LATIN SMALL LETTER D WITH CARON
    '\u0147' # 0x00d5 -> LATIN CAPITAL LETTER N WITH CARON
    '\xcd' # 0x00d6 -> LATIN CAPITAL LETTER I WITH ACUTE
    '\xce' # 0x00d7 -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
    '\u011b' # 0x00d8 -> LATIN SMALL LETTER E WITH CARON
    '\u2518' # 0x00d9 -> BOX DRAWINGS LIGHT UP AND LEFT
    '\u250c' # 0x00da -> BOX DRAWINGS LIGHT DOWN AND RIGHT
    '\u2588' # 0x00db -> FULL BLOCK
    '\u2584' # 0x00dc -> LOWER HALF BLOCK
    '\u0162' # 0x00dd -> LATIN CAPITAL LETTER T WITH CEDILLA
    '\u016e' # 0x00de -> LATIN CAPITAL LETTER U WITH RING ABOVE
    '\u2580' # 0x00df -> UPPER HALF BLOCK
    '\xd3' # 0x00e0 -> LATIN CAPITAL LETTER O WITH ACUTE
    '\xdf' # 0x00e1 -> LATIN SMALL LETTER SHARP S
    '\xd4' # 0x00e2 -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
    '\u0143' # 0x00e3 -> LATIN CAPITAL LETTER N WITH ACUTE
    '\u0144' # 0x00e4 -> LATIN SMALL LETTER N WITH ACUTE
    '\u0148' # 0x00e5 -> LATIN SMALL LETTER N WITH CARON
    '\u0160' # 0x00e6 -> LATIN CAPITAL LETTER S WITH CARON
    '\u0161' # 0x00e7 -> LATIN SMALL LETTER S WITH CARON
    '\u0154' # 0x00e8 -> LATIN CAPITAL LETTER R WITH ACUTE
    '\xda' # 0x00e9 -> LATIN CAPITAL LETTER U WITH ACUTE
    '\u0155' # 0x00ea -> LATIN SMALL LETTER R WITH ACUTE
    '\u0170' # 0x00eb -> LATIN CAPITAL LETTER U WITH DOUBLE ACUTE
    '\xfd' # 0x00ec -> LATIN SMALL LETTER Y WITH ACUTE
    '\xdd' # 0x00ed -> LATIN CAPITAL LETTER Y WITH ACUTE
    '\u0163' # 0x00ee -> LATIN SMALL LETTER T WITH CEDILLA
    '\xb4' # 0x00ef -> ACUTE ACCENT
    '\xad' # 0x00f0 -> SOFT HYPHEN
    '\u02dd' # 0x00f1 -> DOUBLE ACUTE ACCENT
    '\u02db' # 0x00f2 -> OGONEK
    '\u02c7' # 0x00f3 -> CARON
    '\u02d8' # 0x00f4 -> BREVE
    '\xa7' # 0x00f5 -> SECTION SIGN
    '\xf7' # 0x00f6 -> DIVISION SIGN
    '\xb8' # 0x00f7 -> CEDILLA
    '\xb0' # 0x00f8 -> DEGREE SIGN
    '\xa8' # 0x00f9 -> DIAERESIS
    '\u02d9' # 0x00fa -> DOT ABOVE
    '\u0171' # 0x00fb -> LATIN SMALL LETTER U WITH DOUBLE ACUTE
    '\u0158' # 0x00fc -> LATIN CAPITAL LETTER R WITH CARON
    '\u0159' # 0x00fd -> LATIN SMALL LETTER R WITH CARON
    '\u25a0' # 0x00fe -> BLACK SQUARE
    '\xa0' # 0x00ff -> NO-BREAK SPACE
)
# Inverse of decoding_table: maps a Unicode code point to the single CP852
# byte that encodes it (generated by gencodec.py; consumed by
# codecs.charmap_encode). 0x00-0x7f is the ASCII identity range.
encoding_map = {
    0x0000: 0x0000, # NULL
    0x0001: 0x0001, # START OF HEADING
    0x0002: 0x0002, # START OF TEXT
    0x0003: 0x0003, # END OF TEXT
    0x0004: 0x0004, # END OF TRANSMISSION
    0x0005: 0x0005, # ENQUIRY
    0x0006: 0x0006, # ACKNOWLEDGE
    0x0007: 0x0007, # BELL
    0x0008: 0x0008, # BACKSPACE
    0x0009: 0x0009, # HORIZONTAL TABULATION
    0x000a: 0x000a, # LINE FEED
    0x000b: 0x000b, # VERTICAL TABULATION
    0x000c: 0x000c, # FORM FEED
    0x000d: 0x000d, # CARRIAGE RETURN
    0x000e: 0x000e, # SHIFT OUT
    0x000f: 0x000f, # SHIFT IN
    0x0010: 0x0010, # DATA LINK ESCAPE
    0x0011: 0x0011, # DEVICE CONTROL ONE
    0x0012: 0x0012, # DEVICE CONTROL TWO
    0x0013: 0x0013, # DEVICE CONTROL THREE
    0x0014: 0x0014, # DEVICE CONTROL FOUR
    0x0015: 0x0015, # NEGATIVE ACKNOWLEDGE
    0x0016: 0x0016, # SYNCHRONOUS IDLE
    0x0017: 0x0017, # END OF TRANSMISSION BLOCK
    0x0018: 0x0018, # CANCEL
    0x0019: 0x0019, # END OF MEDIUM
    0x001a: 0x001a, # SUBSTITUTE
    0x001b: 0x001b, # ESCAPE
    0x001c: 0x001c, # FILE SEPARATOR
    0x001d: 0x001d, # GROUP SEPARATOR
    0x001e: 0x001e, # RECORD SEPARATOR
    0x001f: 0x001f, # UNIT SEPARATOR
    0x0020: 0x0020, # SPACE
    0x0021: 0x0021, # EXCLAMATION MARK
    0x0022: 0x0022, # QUOTATION MARK
    0x0023: 0x0023, # NUMBER SIGN
    0x0024: 0x0024, # DOLLAR SIGN
    0x0025: 0x0025, # PERCENT SIGN
    0x0026: 0x0026, # AMPERSAND
    0x0027: 0x0027, # APOSTROPHE
    0x0028: 0x0028, # LEFT PARENTHESIS
    0x0029: 0x0029, # RIGHT PARENTHESIS
    0x002a: 0x002a, # ASTERISK
    0x002b: 0x002b, # PLUS SIGN
    0x002c: 0x002c, # COMMA
    0x002d: 0x002d, # HYPHEN-MINUS
    0x002e: 0x002e, # FULL STOP
    0x002f: 0x002f, # SOLIDUS
    0x0030: 0x0030, # DIGIT ZERO
    0x0031: 0x0031, # DIGIT ONE
    0x0032: 0x0032, # DIGIT TWO
    0x0033: 0x0033, # DIGIT THREE
    0x0034: 0x0034, # DIGIT FOUR
    0x0035: 0x0035, # DIGIT FIVE
    0x0036: 0x0036, # DIGIT SIX
    0x0037: 0x0037, # DIGIT SEVEN
    0x0038: 0x0038, # DIGIT EIGHT
    0x0039: 0x0039, # DIGIT NINE
    0x003a: 0x003a, # COLON
    0x003b: 0x003b, # SEMICOLON
    0x003c: 0x003c, # LESS-THAN SIGN
    0x003d: 0x003d, # EQUALS SIGN
    0x003e: 0x003e, # GREATER-THAN SIGN
    0x003f: 0x003f, # QUESTION MARK
    0x0040: 0x0040, # COMMERCIAL AT
    0x0041: 0x0041, # LATIN CAPITAL LETTER A
    0x0042: 0x0042, # LATIN CAPITAL LETTER B
    0x0043: 0x0043, # LATIN CAPITAL LETTER C
    0x0044: 0x0044, # LATIN CAPITAL LETTER D
    0x0045: 0x0045, # LATIN CAPITAL LETTER E
    0x0046: 0x0046, # LATIN CAPITAL LETTER F
    0x0047: 0x0047, # LATIN CAPITAL LETTER G
    0x0048: 0x0048, # LATIN CAPITAL LETTER H
    0x0049: 0x0049, # LATIN CAPITAL LETTER I
    0x004a: 0x004a, # LATIN CAPITAL LETTER J
    0x004b: 0x004b, # LATIN CAPITAL LETTER K
    0x004c: 0x004c, # LATIN CAPITAL LETTER L
    0x004d: 0x004d, # LATIN CAPITAL LETTER M
    0x004e: 0x004e, # LATIN CAPITAL LETTER N
    0x004f: 0x004f, # LATIN CAPITAL LETTER O
    0x0050: 0x0050, # LATIN CAPITAL LETTER P
    0x0051: 0x0051, # LATIN CAPITAL LETTER Q
    0x0052: 0x0052, # LATIN CAPITAL LETTER R
    0x0053: 0x0053, # LATIN CAPITAL LETTER S
    0x0054: 0x0054, # LATIN CAPITAL LETTER T
    0x0055: 0x0055, # LATIN CAPITAL LETTER U
    0x0056: 0x0056, # LATIN CAPITAL LETTER V
    0x0057: 0x0057, # LATIN CAPITAL LETTER W
    0x0058: 0x0058, # LATIN CAPITAL LETTER X
    0x0059: 0x0059, # LATIN CAPITAL LETTER Y
    0x005a: 0x005a, # LATIN CAPITAL LETTER Z
    0x005b: 0x005b, # LEFT SQUARE BRACKET
    0x005c: 0x005c, # REVERSE SOLIDUS
    0x005d: 0x005d, # RIGHT SQUARE BRACKET
    0x005e: 0x005e, # CIRCUMFLEX ACCENT
    0x005f: 0x005f, # LOW LINE
    0x0060: 0x0060, # GRAVE ACCENT
    0x0061: 0x0061, # LATIN SMALL LETTER A
    0x0062: 0x0062, # LATIN SMALL LETTER B
    0x0063: 0x0063, # LATIN SMALL LETTER C
    0x0064: 0x0064, # LATIN SMALL LETTER D
    0x0065: 0x0065, # LATIN SMALL LETTER E
    0x0066: 0x0066, # LATIN SMALL LETTER F
    0x0067: 0x0067, # LATIN SMALL LETTER G
    0x0068: 0x0068, # LATIN SMALL LETTER H
    0x0069: 0x0069, # LATIN SMALL LETTER I
    0x006a: 0x006a, # LATIN SMALL LETTER J
    0x006b: 0x006b, # LATIN SMALL LETTER K
    0x006c: 0x006c, # LATIN SMALL LETTER L
    0x006d: 0x006d, # LATIN SMALL LETTER M
    0x006e: 0x006e, # LATIN SMALL LETTER N
    0x006f: 0x006f, # LATIN SMALL LETTER O
    0x0070: 0x0070, # LATIN SMALL LETTER P
    0x0071: 0x0071, # LATIN SMALL LETTER Q
    0x0072: 0x0072, # LATIN SMALL LETTER R
    0x0073: 0x0073, # LATIN SMALL LETTER S
    0x0074: 0x0074, # LATIN SMALL LETTER T
    0x0075: 0x0075, # LATIN SMALL LETTER U
    0x0076: 0x0076, # LATIN SMALL LETTER V
    0x0077: 0x0077, # LATIN SMALL LETTER W
    0x0078: 0x0078, # LATIN SMALL LETTER X
    0x0079: 0x0079, # LATIN SMALL LETTER Y
    0x007a: 0x007a, # LATIN SMALL LETTER Z
    0x007b: 0x007b, # LEFT CURLY BRACKET
    0x007c: 0x007c, # VERTICAL LINE
    0x007d: 0x007d, # RIGHT CURLY BRACKET
    0x007e: 0x007e, # TILDE
    0x007f: 0x007f, # DELETE
    0x00a0: 0x00ff, # NO-BREAK SPACE
    0x00a4: 0x00cf, # CURRENCY SIGN
    0x00a7: 0x00f5, # SECTION SIGN
    0x00a8: 0x00f9, # DIAERESIS
    0x00ab: 0x00ae, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
    0x00ac: 0x00aa, # NOT SIGN
    0x00ad: 0x00f0, # SOFT HYPHEN
    0x00b0: 0x00f8, # DEGREE SIGN
    0x00b4: 0x00ef, # ACUTE ACCENT
    0x00b8: 0x00f7, # CEDILLA
    0x00bb: 0x00af, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
    0x00c1: 0x00b5, # LATIN CAPITAL LETTER A WITH ACUTE
    0x00c2: 0x00b6, # LATIN CAPITAL LETTER A WITH CIRCUMFLEX
    0x00c4: 0x008e, # LATIN CAPITAL LETTER A WITH DIAERESIS
    0x00c7: 0x0080, # LATIN CAPITAL LETTER C WITH CEDILLA
    0x00c9: 0x0090, # LATIN CAPITAL LETTER E WITH ACUTE
    0x00cb: 0x00d3, # LATIN CAPITAL LETTER E WITH DIAERESIS
    0x00cd: 0x00d6, # LATIN CAPITAL LETTER I WITH ACUTE
    0x00ce: 0x00d7, # LATIN CAPITAL LETTER I WITH CIRCUMFLEX
    0x00d3: 0x00e0, # LATIN CAPITAL LETTER O WITH ACUTE
    0x00d4: 0x00e2, # LATIN CAPITAL LETTER O WITH CIRCUMFLEX
    0x00d6: 0x0099, # LATIN CAPITAL LETTER O WITH DIAERESIS
    0x00d7: 0x009e, # MULTIPLICATION SIGN
    0x00da: 0x00e9, # LATIN CAPITAL LETTER U WITH ACUTE
    0x00dc: 0x009a, # LATIN CAPITAL LETTER U WITH DIAERESIS
    0x00dd: 0x00ed, # LATIN CAPITAL LETTER Y WITH ACUTE
    0x00df: 0x00e1, # LATIN SMALL LETTER SHARP S
    0x00e1: 0x00a0, # LATIN SMALL LETTER A WITH ACUTE
    0x00e2: 0x0083, # LATIN SMALL LETTER A WITH CIRCUMFLEX
    0x00e4: 0x0084, # LATIN SMALL LETTER A WITH DIAERESIS
    0x00e7: 0x0087, # LATIN SMALL LETTER C WITH CEDILLA
    0x00e9: 0x0082, # LATIN SMALL LETTER E WITH ACUTE
    0x00eb: 0x0089, # LATIN SMALL LETTER E WITH DIAERESIS
    0x00ed: 0x00a1, # LATIN SMALL LETTER I WITH ACUTE
    0x00ee: 0x008c, # LATIN SMALL LETTER I WITH CIRCUMFLEX
    0x00f3: 0x00a2, # LATIN SMALL LETTER O WITH ACUTE
    0x00f4: 0x0093, # LATIN SMALL LETTER O WITH CIRCUMFLEX
    0x00f6: 0x0094, # LATIN SMALL LETTER O WITH DIAERESIS
    0x00f7: 0x00f6, # DIVISION SIGN
    0x00fa: 0x00a3, # LATIN SMALL LETTER U WITH ACUTE
    0x00fc: 0x0081, # LATIN SMALL LETTER U WITH DIAERESIS
    0x00fd: 0x00ec, # LATIN SMALL LETTER Y WITH ACUTE
    0x0102: 0x00c6, # LATIN CAPITAL LETTER A WITH BREVE
    0x0103: 0x00c7, # LATIN SMALL LETTER A WITH BREVE
    0x0104: 0x00a4, # LATIN CAPITAL LETTER A WITH OGONEK
    0x0105: 0x00a5, # LATIN SMALL LETTER A WITH OGONEK
    0x0106: 0x008f, # LATIN CAPITAL LETTER C WITH ACUTE
    0x0107: 0x0086, # LATIN SMALL LETTER C WITH ACUTE
    0x010c: 0x00ac, # LATIN CAPITAL LETTER C WITH CARON
    0x010d: 0x009f, # LATIN SMALL LETTER C WITH CARON
    0x010e: 0x00d2, # LATIN CAPITAL LETTER D WITH CARON
    0x010f: 0x00d4, # LATIN SMALL LETTER D WITH CARON
    0x0110: 0x00d1, # LATIN CAPITAL LETTER D WITH STROKE
    0x0111: 0x00d0, # LATIN SMALL LETTER D WITH STROKE
    0x0118: 0x00a8, # LATIN CAPITAL LETTER E WITH OGONEK
    0x0119: 0x00a9, # LATIN SMALL LETTER E WITH OGONEK
    0x011a: 0x00b7, # LATIN CAPITAL LETTER E WITH CARON
    0x011b: 0x00d8, # LATIN SMALL LETTER E WITH CARON
    0x0139: 0x0091, # LATIN CAPITAL LETTER L WITH ACUTE
    0x013a: 0x0092, # LATIN SMALL LETTER L WITH ACUTE
    0x013d: 0x0095, # LATIN CAPITAL LETTER L WITH CARON
    0x013e: 0x0096, # LATIN SMALL LETTER L WITH CARON
    0x0141: 0x009d, # LATIN CAPITAL LETTER L WITH STROKE
    0x0142: 0x0088, # LATIN SMALL LETTER L WITH STROKE
    0x0143: 0x00e3, # LATIN CAPITAL LETTER N WITH ACUTE
    0x0144: 0x00e4, # LATIN SMALL LETTER N WITH ACUTE
    0x0147: 0x00d5, # LATIN CAPITAL LETTER N WITH CARON
    0x0148: 0x00e5, # LATIN SMALL LETTER N WITH CARON
    0x0150: 0x008a, # LATIN CAPITAL LETTER O WITH DOUBLE ACUTE
    0x0151: 0x008b, # LATIN SMALL LETTER O WITH DOUBLE ACUTE
    0x0154: 0x00e8, # LATIN CAPITAL LETTER R WITH ACUTE
    0x0155: 0x00ea, # LATIN SMALL LETTER R WITH ACUTE
    0x0158: 0x00fc, # LATIN CAPITAL LETTER R WITH CARON
    0x0159: 0x00fd, # LATIN SMALL LETTER R WITH CARON
    0x015a: 0x0097, # LATIN CAPITAL LETTER S WITH ACUTE
    0x015b: 0x0098, # LATIN SMALL LETTER S WITH ACUTE
    0x015e: 0x00b8, # LATIN CAPITAL LETTER S WITH CEDILLA
    0x015f: 0x00ad, # LATIN SMALL LETTER S WITH CEDILLA
    0x0160: 0x00e6, # LATIN CAPITAL LETTER S WITH CARON
    0x0161: 0x00e7, # LATIN SMALL LETTER S WITH CARON
    0x0162: 0x00dd, # LATIN CAPITAL LETTER T WITH CEDILLA
    0x0163: 0x00ee, # LATIN SMALL LETTER T WITH CEDILLA
    0x0164: 0x009b, # LATIN CAPITAL LETTER T WITH CARON
    0x0165: 0x009c, # LATIN SMALL LETTER T WITH CARON
    0x016e: 0x00de, # LATIN CAPITAL LETTER U WITH RING ABOVE
    0x016f: 0x0085, # LATIN SMALL LETTER U WITH RING ABOVE
    0x0170: 0x00eb, # LATIN CAPITAL LETTER U WITH DOUBLE ACUTE
    0x0171: 0x00fb, # LATIN SMALL LETTER U WITH DOUBLE ACUTE
    0x0179: 0x008d, # LATIN CAPITAL LETTER Z WITH ACUTE
    0x017a: 0x00ab, # LATIN SMALL LETTER Z WITH ACUTE
    0x017b: 0x00bd, # LATIN CAPITAL LETTER Z WITH DOT ABOVE
    0x017c: 0x00be, # LATIN SMALL LETTER Z WITH DOT ABOVE
    0x017d: 0x00a6, # LATIN CAPITAL LETTER Z WITH CARON
    0x017e: 0x00a7, # LATIN SMALL LETTER Z WITH CARON
    0x02c7: 0x00f3, # CARON
    0x02d8: 0x00f4, # BREVE
    0x02d9: 0x00fa, # DOT ABOVE
    0x02db: 0x00f2, # OGONEK
    0x02dd: 0x00f1, # DOUBLE ACUTE ACCENT
    0x2500: 0x00c4, # BOX DRAWINGS LIGHT HORIZONTAL
    0x2502: 0x00b3, # BOX DRAWINGS LIGHT VERTICAL
    0x250c: 0x00da, # BOX DRAWINGS LIGHT DOWN AND RIGHT
    0x2510: 0x00bf, # BOX DRAWINGS LIGHT DOWN AND LEFT
    0x2514: 0x00c0, # BOX DRAWINGS LIGHT UP AND RIGHT
    0x2518: 0x00d9, # BOX DRAWINGS LIGHT UP AND LEFT
    0x251c: 0x00c3, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
    0x2524: 0x00b4, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
    0x252c: 0x00c2, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
    0x2534: 0x00c1, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
    0x253c: 0x00c5, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
    0x2550: 0x00cd, # BOX DRAWINGS DOUBLE HORIZONTAL
    0x2551: 0x00ba, # BOX DRAWINGS DOUBLE VERTICAL
    0x2554: 0x00c9, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
    0x2557: 0x00bb, # BOX DRAWINGS DOUBLE DOWN AND LEFT
    0x255a: 0x00c8, # BOX DRAWINGS DOUBLE UP AND RIGHT
    0x255d: 0x00bc, # BOX DRAWINGS DOUBLE UP AND LEFT
    0x2560: 0x00cc, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
    0x2563: 0x00b9, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
    0x2566: 0x00cb, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
    0x2569: 0x00ca, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
    0x256c: 0x00ce, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
    0x2580: 0x00df, # UPPER HALF BLOCK
    0x2584: 0x00dc, # LOWER HALF BLOCK
    0x2588: 0x00db, # FULL BLOCK
    0x2591: 0x00b0, # LIGHT SHADE
    0x2592: 0x00b1, # MEDIUM SHADE
    0x2593: 0x00b2, # DARK SHADE
    0x25a0: 0x00fe, # BLACK SQUARE
}
# NOTE(review): removed leftover git merge-conflict markers (">>>>>>> b875702..."
# and "=======") that made this file syntactically invalid Python. The module
# content appears duplicated around this point — deduplicate in a follow-up.
""" Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP852.TXT' with gencodec.py.
"""#"
import codecs
class Codec(codecs.Codec):
    """Stateless CP852 codec delegating to the codecs charmap helpers."""

    def encode(self, input, errors='strict'):
        """Encode *input* to CP852 bytes; returns (bytes, length consumed)."""
        encoded = codecs.charmap_encode(input, errors, encoding_map)
        return encoded

    def decode(self, input, errors='strict'):
        """Decode CP852 bytes to text; returns (str, length consumed)."""
        decoded = codecs.charmap_decode(input, errors, decoding_table)
        return decoded
class IncrementalEncoder(codecs.IncrementalEncoder):
    """Incremental CP852 encoder; the charmap codec keeps no state."""

    def encode(self, input, final=False):
        """Encode *input*, returning only the bytes (consumed count dropped)."""
        data, _consumed = codecs.charmap_encode(input, self.errors, encoding_map)
        return data
class IncrementalDecoder(codecs.IncrementalDecoder):
    """Incremental CP852 decoder; the charmap codec keeps no state."""

    def decode(self, input, final=False):
        """Decode *input*, returning only the text (consumed count dropped)."""
        text, _consumed = codecs.charmap_decode(input, self.errors, decoding_table)
        return text
class StreamWriter(Codec,codecs.StreamWriter):
    """Stream writer for CP852: inherits encode from Codec, buffering from codecs.StreamWriter."""
    pass
class StreamReader(Codec,codecs.StreamReader):
    """Stream reader for CP852: inherits decode from Codec, buffering from codecs.StreamReader."""
    pass
def getregentry():
    """Return the CodecInfo record used to register the 'cp852' codec.

    The Codec class is stateless, so a single shared instance supplies
    both the encode and decode entry points.
    """
    codec = Codec()
    return codecs.CodecInfo(
        name='cp852',
        encode=codec.encode,
        decode=codec.decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
# CP852 byte -> Unicode code point map (generated by gencodec.py): starts as
# the identity over 0x00-0xff, then overrides the high half (0x80-0xff).
decoding_map = codecs.make_identity_dict(range(256))
decoding_map.update({
    0x0080: 0x00c7, # LATIN CAPITAL LETTER C WITH CEDILLA
    0x0081: 0x00fc, # LATIN SMALL LETTER U WITH DIAERESIS
    0x0082: 0x00e9, # LATIN SMALL LETTER E WITH ACUTE
    0x0083: 0x00e2, # LATIN SMALL LETTER A WITH CIRCUMFLEX
    0x0084: 0x00e4, # LATIN SMALL LETTER A WITH DIAERESIS
    0x0085: 0x016f, # LATIN SMALL LETTER U WITH RING ABOVE
    0x0086: 0x0107, # LATIN SMALL LETTER C WITH ACUTE
    0x0087: 0x00e7, # LATIN SMALL LETTER C WITH CEDILLA
    0x0088: 0x0142, # LATIN SMALL LETTER L WITH STROKE
    0x0089: 0x00eb, # LATIN SMALL LETTER E WITH DIAERESIS
    0x008a: 0x0150, # LATIN CAPITAL LETTER O WITH DOUBLE ACUTE
    0x008b: 0x0151, # LATIN SMALL LETTER O WITH DOUBLE ACUTE
    0x008c: 0x00ee, # LATIN SMALL LETTER I WITH CIRCUMFLEX
    0x008d: 0x0179, # LATIN CAPITAL LETTER Z WITH ACUTE
    0x008e: 0x00c4, # LATIN CAPITAL LETTER A WITH DIAERESIS
    0x008f: 0x0106, # LATIN CAPITAL LETTER C WITH ACUTE
    0x0090: 0x00c9, # LATIN CAPITAL LETTER E WITH ACUTE
    0x0091: 0x0139, # LATIN CAPITAL LETTER L WITH ACUTE
    0x0092: 0x013a, # LATIN SMALL LETTER L WITH ACUTE
    0x0093: 0x00f4, # LATIN SMALL LETTER O WITH CIRCUMFLEX
    0x0094: 0x00f6, # LATIN SMALL LETTER O WITH DIAERESIS
    0x0095: 0x013d, # LATIN CAPITAL LETTER L WITH CARON
    0x0096: 0x013e, # LATIN SMALL LETTER L WITH CARON
    0x0097: 0x015a, # LATIN CAPITAL LETTER S WITH ACUTE
    0x0098: 0x015b, # LATIN SMALL LETTER S WITH ACUTE
    0x0099: 0x00d6, # LATIN CAPITAL LETTER O WITH DIAERESIS
    0x009a: 0x00dc, # LATIN CAPITAL LETTER U WITH DIAERESIS
    0x009b: 0x0164, # LATIN CAPITAL LETTER T WITH CARON
    0x009c: 0x0165, # LATIN SMALL LETTER T WITH CARON
    0x009d: 0x0141, # LATIN CAPITAL LETTER L WITH STROKE
    0x009e: 0x00d7, # MULTIPLICATION SIGN
    0x009f: 0x010d, # LATIN SMALL LETTER C WITH CARON
    0x00a0: 0x00e1, # LATIN SMALL LETTER A WITH ACUTE
    0x00a1: 0x00ed, # LATIN SMALL LETTER I WITH ACUTE
    0x00a2: 0x00f3, # LATIN SMALL LETTER O WITH ACUTE
    0x00a3: 0x00fa, # LATIN SMALL LETTER U WITH ACUTE
    0x00a4: 0x0104, # LATIN CAPITAL LETTER A WITH OGONEK
    0x00a5: 0x0105, # LATIN SMALL LETTER A WITH OGONEK
    0x00a6: 0x017d, # LATIN CAPITAL LETTER Z WITH CARON
    0x00a7: 0x017e, # LATIN SMALL LETTER Z WITH CARON
    0x00a8: 0x0118, # LATIN CAPITAL LETTER E WITH OGONEK
    0x00a9: 0x0119, # LATIN SMALL LETTER E WITH OGONEK
    0x00aa: 0x00ac, # NOT SIGN
    0x00ab: 0x017a, # LATIN SMALL LETTER Z WITH ACUTE
    0x00ac: 0x010c, # LATIN CAPITAL LETTER C WITH CARON
    0x00ad: 0x015f, # LATIN SMALL LETTER S WITH CEDILLA
    0x00ae: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
    0x00af: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
    0x00b0: 0x2591, # LIGHT SHADE
    0x00b1: 0x2592, # MEDIUM SHADE
    0x00b2: 0x2593, # DARK SHADE
    0x00b3: 0x2502, # BOX DRAWINGS LIGHT VERTICAL
    0x00b4: 0x2524, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
    0x00b5: 0x00c1, # LATIN CAPITAL LETTER A WITH ACUTE
    0x00b6: 0x00c2, # LATIN CAPITAL LETTER A WITH CIRCUMFLEX
    0x00b7: 0x011a, # LATIN CAPITAL LETTER E WITH CARON
    0x00b8: 0x015e, # LATIN CAPITAL LETTER S WITH CEDILLA
    0x00b9: 0x2563, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
    0x00ba: 0x2551, # BOX DRAWINGS DOUBLE VERTICAL
    0x00bb: 0x2557, # BOX DRAWINGS DOUBLE DOWN AND LEFT
    0x00bc: 0x255d, # BOX DRAWINGS DOUBLE UP AND LEFT
    0x00bd: 0x017b, # LATIN CAPITAL LETTER Z WITH DOT ABOVE
    0x00be: 0x017c, # LATIN SMALL LETTER Z WITH DOT ABOVE
    0x00bf: 0x2510, # BOX DRAWINGS LIGHT DOWN AND LEFT
    0x00c0: 0x2514, # BOX DRAWINGS LIGHT UP AND RIGHT
    0x00c1: 0x2534, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
    0x00c2: 0x252c, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
    0x00c3: 0x251c, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
    0x00c4: 0x2500, # BOX DRAWINGS LIGHT HORIZONTAL
    0x00c5: 0x253c, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
    0x00c6: 0x0102, # LATIN CAPITAL LETTER A WITH BREVE
    0x00c7: 0x0103, # LATIN SMALL LETTER A WITH BREVE
    0x00c8: 0x255a, # BOX DRAWINGS DOUBLE UP AND RIGHT
    0x00c9: 0x2554, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
    0x00ca: 0x2569, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
    0x00cb: 0x2566, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
    0x00cc: 0x2560, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
    0x00cd: 0x2550, # BOX DRAWINGS DOUBLE HORIZONTAL
    0x00ce: 0x256c, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
    0x00cf: 0x00a4, # CURRENCY SIGN
    0x00d0: 0x0111, # LATIN SMALL LETTER D WITH STROKE
    0x00d1: 0x0110, # LATIN CAPITAL LETTER D WITH STROKE
    0x00d2: 0x010e, # LATIN CAPITAL LETTER D WITH CARON
    0x00d3: 0x00cb, # LATIN CAPITAL LETTER E WITH DIAERESIS
    0x00d4: 0x010f, # LATIN SMALL LETTER D WITH CARON
    0x00d5: 0x0147, # LATIN CAPITAL LETTER N WITH CARON
    0x00d6: 0x00cd, # LATIN CAPITAL LETTER I WITH ACUTE
    0x00d7: 0x00ce, # LATIN CAPITAL LETTER I WITH CIRCUMFLEX
    0x00d8: 0x011b, # LATIN SMALL LETTER E WITH CARON
    0x00d9: 0x2518, # BOX DRAWINGS LIGHT UP AND LEFT
    0x00da: 0x250c, # BOX DRAWINGS LIGHT DOWN AND RIGHT
    0x00db: 0x2588, # FULL BLOCK
    0x00dc: 0x2584, # LOWER HALF BLOCK
    0x00dd: 0x0162, # LATIN CAPITAL LETTER T WITH CEDILLA
    0x00de: 0x016e, # LATIN CAPITAL LETTER U WITH RING ABOVE
    0x00df: 0x2580, # UPPER HALF BLOCK
    0x00e0: 0x00d3, # LATIN CAPITAL LETTER O WITH ACUTE
    0x00e1: 0x00df, # LATIN SMALL LETTER SHARP S
    0x00e2: 0x00d4, # LATIN CAPITAL LETTER O WITH CIRCUMFLEX
    0x00e3: 0x0143, # LATIN CAPITAL LETTER N WITH ACUTE
    0x00e4: 0x0144, # LATIN SMALL LETTER N WITH ACUTE
    0x00e5: 0x0148, # LATIN SMALL LETTER N WITH CARON
    0x00e6: 0x0160, # LATIN CAPITAL LETTER S WITH CARON
    0x00e7: 0x0161, # LATIN SMALL LETTER S WITH CARON
    0x00e8: 0x0154, # LATIN CAPITAL LETTER R WITH ACUTE
    0x00e9: 0x00da, # LATIN CAPITAL LETTER U WITH ACUTE
    0x00ea: 0x0155, # LATIN SMALL LETTER R WITH ACUTE
    0x00eb: 0x0170, # LATIN CAPITAL LETTER U WITH DOUBLE ACUTE
    0x00ec: 0x00fd, # LATIN SMALL LETTER Y WITH ACUTE
    0x00ed: 0x00dd, # LATIN CAPITAL LETTER Y WITH ACUTE
    0x00ee: 0x0163, # LATIN SMALL LETTER T WITH CEDILLA
    0x00ef: 0x00b4, # ACUTE ACCENT
    0x00f0: 0x00ad, # SOFT HYPHEN
    0x00f1: 0x02dd, # DOUBLE ACUTE ACCENT
    0x00f2: 0x02db, # OGONEK
    0x00f3: 0x02c7, # CARON
    0x00f4: 0x02d8, # BREVE
    0x00f5: 0x00a7, # SECTION SIGN
    0x00f6: 0x00f7, # DIVISION SIGN
    0x00f7: 0x00b8, # CEDILLA
    0x00f8: 0x00b0, # DEGREE SIGN
    0x00f9: 0x00a8, # DIAERESIS
    0x00fa: 0x02d9, # DOT ABOVE
    0x00fb: 0x0171, # LATIN SMALL LETTER U WITH DOUBLE ACUTE
    0x00fc: 0x0158, # LATIN CAPITAL LETTER R WITH CARON
    0x00fd: 0x0159, # LATIN SMALL LETTER R WITH CARON
    0x00fe: 0x25a0, # BLACK SQUARE
    0x00ff: 0x00a0, # NO-BREAK SPACE
})
# 256-character decoding table: decoding_table[byte] is the Unicode character
# for that CP852 byte. The adjacent string literals concatenate at compile
# time into a single str, as expected by codecs.charmap_decode.
decoding_table = (
    '\x00' # 0x0000 -> NULL
    '\x01' # 0x0001 -> START OF HEADING
    '\x02' # 0x0002 -> START OF TEXT
    '\x03' # 0x0003 -> END OF TEXT
    '\x04' # 0x0004 -> END OF TRANSMISSION
    '\x05' # 0x0005 -> ENQUIRY
    '\x06' # 0x0006 -> ACKNOWLEDGE
    '\x07' # 0x0007 -> BELL
    '\x08' # 0x0008 -> BACKSPACE
    '\t' # 0x0009 -> HORIZONTAL TABULATION
    '\n' # 0x000a -> LINE FEED
    '\x0b' # 0x000b -> VERTICAL TABULATION
    '\x0c' # 0x000c -> FORM FEED
    '\r' # 0x000d -> CARRIAGE RETURN
    '\x0e' # 0x000e -> SHIFT OUT
    '\x0f' # 0x000f -> SHIFT IN
    '\x10' # 0x0010 -> DATA LINK ESCAPE
    '\x11' # 0x0011 -> DEVICE CONTROL ONE
    '\x12' # 0x0012 -> DEVICE CONTROL TWO
    '\x13' # 0x0013 -> DEVICE CONTROL THREE
    '\x14' # 0x0014 -> DEVICE CONTROL FOUR
    '\x15' # 0x0015 -> NEGATIVE ACKNOWLEDGE
    '\x16' # 0x0016 -> SYNCHRONOUS IDLE
    '\x17' # 0x0017 -> END OF TRANSMISSION BLOCK
    '\x18' # 0x0018 -> CANCEL
    '\x19' # 0x0019 -> END OF MEDIUM
    '\x1a' # 0x001a -> SUBSTITUTE
    '\x1b' # 0x001b -> ESCAPE
    '\x1c' # 0x001c -> FILE SEPARATOR
    '\x1d' # 0x001d -> GROUP SEPARATOR
    '\x1e' # 0x001e -> RECORD SEPARATOR
    '\x1f' # 0x001f -> UNIT SEPARATOR
    ' ' # 0x0020 -> SPACE
    '!' # 0x0021 -> EXCLAMATION MARK
    '"' # 0x0022 -> QUOTATION MARK
    '#' # 0x0023 -> NUMBER SIGN
    '$' # 0x0024 -> DOLLAR SIGN
    '%' # 0x0025 -> PERCENT SIGN
    '&' # 0x0026 -> AMPERSAND
    "'" # 0x0027 -> APOSTROPHE
    '(' # 0x0028 -> LEFT PARENTHESIS
    ')' # 0x0029 -> RIGHT PARENTHESIS
    '*' # 0x002a -> ASTERISK
    '+' # 0x002b -> PLUS SIGN
    ',' # 0x002c -> COMMA
    '-' # 0x002d -> HYPHEN-MINUS
    '.' # 0x002e -> FULL STOP
    '/' # 0x002f -> SOLIDUS
    '0' # 0x0030 -> DIGIT ZERO
    '1' # 0x0031 -> DIGIT ONE
    '2' # 0x0032 -> DIGIT TWO
    '3' # 0x0033 -> DIGIT THREE
    '4' # 0x0034 -> DIGIT FOUR
    '5' # 0x0035 -> DIGIT FIVE
    '6' # 0x0036 -> DIGIT SIX
    '7' # 0x0037 -> DIGIT SEVEN
    '8' # 0x0038 -> DIGIT EIGHT
    '9' # 0x0039 -> DIGIT NINE
    ':' # 0x003a -> COLON
    ';' # 0x003b -> SEMICOLON
    '<' # 0x003c -> LESS-THAN SIGN
    '=' # 0x003d -> EQUALS SIGN
    '>' # 0x003e -> GREATER-THAN SIGN
    '?' # 0x003f -> QUESTION MARK
    '@' # 0x0040 -> COMMERCIAL AT
    'A' # 0x0041 -> LATIN CAPITAL LETTER A
    'B' # 0x0042 -> LATIN CAPITAL LETTER B
    'C' # 0x0043 -> LATIN CAPITAL LETTER C
    'D' # 0x0044 -> LATIN CAPITAL LETTER D
    'E' # 0x0045 -> LATIN CAPITAL LETTER E
    'F' # 0x0046 -> LATIN CAPITAL LETTER F
    'G' # 0x0047 -> LATIN CAPITAL LETTER G
    'H' # 0x0048 -> LATIN CAPITAL LETTER H
    'I' # 0x0049 -> LATIN CAPITAL LETTER I
    'J' # 0x004a -> LATIN CAPITAL LETTER J
    'K' # 0x004b -> LATIN CAPITAL LETTER K
    'L' # 0x004c -> LATIN CAPITAL LETTER L
    'M' # 0x004d -> LATIN CAPITAL LETTER M
    'N' # 0x004e -> LATIN CAPITAL LETTER N
    'O' # 0x004f -> LATIN CAPITAL LETTER O
    'P' # 0x0050 -> LATIN CAPITAL LETTER P
    'Q' # 0x0051 -> LATIN CAPITAL LETTER Q
    'R' # 0x0052 -> LATIN CAPITAL LETTER R
    'S' # 0x0053 -> LATIN CAPITAL LETTER S
    'T' # 0x0054 -> LATIN CAPITAL LETTER T
    'U' # 0x0055 -> LATIN CAPITAL LETTER U
    'V' # 0x0056 -> LATIN CAPITAL LETTER V
    'W' # 0x0057 -> LATIN CAPITAL LETTER W
    'X' # 0x0058 -> LATIN CAPITAL LETTER X
    'Y' # 0x0059 -> LATIN CAPITAL LETTER Y
    'Z' # 0x005a -> LATIN CAPITAL LETTER Z
    '[' # 0x005b -> LEFT SQUARE BRACKET
    '\\' # 0x005c -> REVERSE SOLIDUS
    ']' # 0x005d -> RIGHT SQUARE BRACKET
    '^' # 0x005e -> CIRCUMFLEX ACCENT
    '_' # 0x005f -> LOW LINE
    '`' # 0x0060 -> GRAVE ACCENT
    'a' # 0x0061 -> LATIN SMALL LETTER A
    'b' # 0x0062 -> LATIN SMALL LETTER B
    'c' # 0x0063 -> LATIN SMALL LETTER C
    'd' # 0x0064 -> LATIN SMALL LETTER D
    'e' # 0x0065 -> LATIN SMALL LETTER E
    'f' # 0x0066 -> LATIN SMALL LETTER F
    'g' # 0x0067 -> LATIN SMALL LETTER G
    'h' # 0x0068 -> LATIN SMALL LETTER H
    'i' # 0x0069 -> LATIN SMALL LETTER I
    'j' # 0x006a -> LATIN SMALL LETTER J
    'k' # 0x006b -> LATIN SMALL LETTER K
    'l' # 0x006c -> LATIN SMALL LETTER L
    'm' # 0x006d -> LATIN SMALL LETTER M
    'n' # 0x006e -> LATIN SMALL LETTER N
    'o' # 0x006f -> LATIN SMALL LETTER O
    'p' # 0x0070 -> LATIN SMALL LETTER P
    'q' # 0x0071 -> LATIN SMALL LETTER Q
    'r' # 0x0072 -> LATIN SMALL LETTER R
    's' # 0x0073 -> LATIN SMALL LETTER S
    't' # 0x0074 -> LATIN SMALL LETTER T
    'u' # 0x0075 -> LATIN SMALL LETTER U
    'v' # 0x0076 -> LATIN SMALL LETTER V
    'w' # 0x0077 -> LATIN SMALL LETTER W
    'x' # 0x0078 -> LATIN SMALL LETTER X
    'y' # 0x0079 -> LATIN SMALL LETTER Y
    'z' # 0x007a -> LATIN SMALL LETTER Z
    '{' # 0x007b -> LEFT CURLY BRACKET
    '|' # 0x007c -> VERTICAL LINE
    '}' # 0x007d -> RIGHT CURLY BRACKET
    '~' # 0x007e -> TILDE
    '\x7f' # 0x007f -> DELETE
    '\xc7' # 0x0080 -> LATIN CAPITAL LETTER C WITH CEDILLA
    '\xfc' # 0x0081 -> LATIN SMALL LETTER U WITH DIAERESIS
    '\xe9' # 0x0082 -> LATIN SMALL LETTER E WITH ACUTE
    '\xe2' # 0x0083 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
    '\xe4' # 0x0084 -> LATIN SMALL LETTER A WITH DIAERESIS
    '\u016f' # 0x0085 -> LATIN SMALL LETTER U WITH RING ABOVE
    '\u0107' # 0x0086 -> LATIN SMALL LETTER C WITH ACUTE
    '\xe7' # 0x0087 -> LATIN SMALL LETTER C WITH CEDILLA
    '\u0142' # 0x0088 -> LATIN SMALL LETTER L WITH STROKE
    '\xeb' # 0x0089 -> LATIN SMALL LETTER E WITH DIAERESIS
    '\u0150' # 0x008a -> LATIN CAPITAL LETTER O WITH DOUBLE ACUTE
    '\u0151' # 0x008b -> LATIN SMALL LETTER O WITH DOUBLE ACUTE
    '\xee' # 0x008c -> LATIN SMALL LETTER I WITH CIRCUMFLEX
    '\u0179' # 0x008d -> LATIN CAPITAL LETTER Z WITH ACUTE
    '\xc4' # 0x008e -> LATIN CAPITAL LETTER A WITH DIAERESIS
    '\u0106' # 0x008f -> LATIN CAPITAL LETTER C WITH ACUTE
    '\xc9' # 0x0090 -> LATIN CAPITAL LETTER E WITH ACUTE
    '\u0139' # 0x0091 -> LATIN CAPITAL LETTER L WITH ACUTE
    '\u013a' # 0x0092 -> LATIN SMALL LETTER L WITH ACUTE
    '\xf4' # 0x0093 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
    '\xf6' # 0x0094 -> LATIN SMALL LETTER O WITH DIAERESIS
    '\u013d' # 0x0095 -> LATIN CAPITAL LETTER L WITH CARON
    '\u013e' # 0x0096 -> LATIN SMALL LETTER L WITH CARON
    '\u015a' # 0x0097 -> LATIN CAPITAL LETTER S WITH ACUTE
    '\u015b' # 0x0098 -> LATIN SMALL LETTER S WITH ACUTE
    '\xd6' # 0x0099 -> LATIN CAPITAL LETTER O WITH DIAERESIS
    '\xdc' # 0x009a -> LATIN CAPITAL LETTER U WITH DIAERESIS
    '\u0164' # 0x009b -> LATIN CAPITAL LETTER T WITH CARON
    '\u0165' # 0x009c -> LATIN SMALL LETTER T WITH CARON
    '\u0141' # 0x009d -> LATIN CAPITAL LETTER L WITH STROKE
    '\xd7' # 0x009e -> MULTIPLICATION SIGN
    '\u010d' # 0x009f -> LATIN SMALL LETTER C WITH CARON
    '\xe1' # 0x00a0 -> LATIN SMALL LETTER A WITH ACUTE
    '\xed' # 0x00a1 -> LATIN SMALL LETTER I WITH ACUTE
    '\xf3' # 0x00a2 -> LATIN SMALL LETTER O WITH ACUTE
    '\xfa' # 0x00a3 -> LATIN SMALL LETTER U WITH ACUTE
    '\u0104' # 0x00a4 -> LATIN CAPITAL LETTER A WITH OGONEK
    '\u0105' # 0x00a5 -> LATIN SMALL LETTER A WITH OGONEK
    '\u017d' # 0x00a6 -> LATIN CAPITAL LETTER Z WITH CARON
    '\u017e' # 0x00a7 -> LATIN SMALL LETTER Z WITH CARON
    '\u0118' # 0x00a8 -> LATIN CAPITAL LETTER E WITH OGONEK
    '\u0119' # 0x00a9 -> LATIN SMALL LETTER E WITH OGONEK
    '\xac' # 0x00aa -> NOT SIGN
    '\u017a' # 0x00ab -> LATIN SMALL LETTER Z WITH ACUTE
    '\u010c' # 0x00ac -> LATIN CAPITAL LETTER C WITH CARON
    '\u015f' # 0x00ad -> LATIN SMALL LETTER S WITH CEDILLA
    '\xab' # 0x00ae -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
    '\xbb' # 0x00af -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
    '\u2591' # 0x00b0 -> LIGHT SHADE
    '\u2592' # 0x00b1 -> MEDIUM SHADE
    '\u2593' # 0x00b2 -> DARK SHADE
    '\u2502' # 0x00b3 -> BOX DRAWINGS LIGHT VERTICAL
    '\u2524' # 0x00b4 -> BOX DRAWINGS LIGHT VERTICAL AND LEFT
    '\xc1' # 0x00b5 -> LATIN CAPITAL LETTER A WITH ACUTE
    '\xc2' # 0x00b6 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
    '\u011a' # 0x00b7 -> LATIN CAPITAL LETTER E WITH CARON
    '\u015e' # 0x00b8 -> LATIN CAPITAL LETTER S WITH CEDILLA
    '\u2563' # 0x00b9 -> BOX DRAWINGS DOUBLE VERTICAL AND LEFT
    '\u2551' # 0x00ba -> BOX DRAWINGS DOUBLE VERTICAL
    '\u2557' # 0x00bb -> BOX DRAWINGS DOUBLE DOWN AND LEFT
    '\u255d' # 0x00bc -> BOX DRAWINGS DOUBLE UP AND LEFT
    '\u017b' # 0x00bd -> LATIN CAPITAL LETTER Z WITH DOT ABOVE
    '\u017c' # 0x00be -> LATIN SMALL LETTER Z WITH DOT ABOVE
    '\u2510' # 0x00bf -> BOX DRAWINGS LIGHT DOWN AND LEFT
    '\u2514' # 0x00c0 -> BOX DRAWINGS LIGHT UP AND RIGHT
    '\u2534' # 0x00c1 -> BOX DRAWINGS LIGHT UP AND HORIZONTAL
    '\u252c' # 0x00c2 -> BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
    '\u251c' # 0x00c3 -> BOX DRAWINGS LIGHT VERTICAL AND RIGHT
    '\u2500' # 0x00c4 -> BOX DRAWINGS LIGHT HORIZONTAL
    '\u253c' # 0x00c5 -> BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
    '\u0102' # 0x00c6 -> LATIN CAPITAL LETTER A WITH BREVE
    '\u0103' # 0x00c7 -> LATIN SMALL LETTER A WITH BREVE
    '\u255a' # 0x00c8 -> BOX DRAWINGS DOUBLE UP AND RIGHT
    '\u2554' # 0x00c9 -> BOX DRAWINGS DOUBLE DOWN AND RIGHT
    '\u2569' # 0x00ca -> BOX DRAWINGS DOUBLE UP AND HORIZONTAL
    '\u2566' # 0x00cb -> BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
    '\u2560' # 0x00cc -> BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
    '\u2550' # 0x00cd -> BOX DRAWINGS DOUBLE HORIZONTAL
    '\u256c' # 0x00ce -> BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
    '\xa4' # 0x00cf -> CURRENCY SIGN
    '\u0111' # 0x00d0 -> LATIN SMALL LETTER D WITH STROKE
    '\u0110' # 0x00d1 -> LATIN CAPITAL LETTER D WITH STROKE
    '\u010e' # 0x00d2 -> LATIN CAPITAL LETTER D WITH CARON
    '\xcb' # 0x00d3 -> LATIN CAPITAL LETTER E WITH DIAERESIS
    '\u010f' # 0x00d4 -> LATIN SMALL LETTER D WITH CARON
    '\u0147' # 0x00d5 -> LATIN CAPITAL LETTER N WITH CARON
    '\xcd' # 0x00d6 -> LATIN CAPITAL LETTER I WITH ACUTE
    '\xce' # 0x00d7 -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
    '\u011b' # 0x00d8 -> LATIN SMALL LETTER E WITH CARON
    '\u2518' # 0x00d9 -> BOX DRAWINGS LIGHT UP AND LEFT
    '\u250c' # 0x00da -> BOX DRAWINGS LIGHT DOWN AND RIGHT
    '\u2588' # 0x00db -> FULL BLOCK
    '\u2584' # 0x00dc -> LOWER HALF BLOCK
    '\u0162' # 0x00dd -> LATIN CAPITAL LETTER T WITH CEDILLA
    '\u016e' # 0x00de -> LATIN CAPITAL LETTER U WITH RING ABOVE
    '\u2580' # 0x00df -> UPPER HALF BLOCK
    '\xd3' # 0x00e0 -> LATIN CAPITAL LETTER O WITH ACUTE
    '\xdf' # 0x00e1 -> LATIN SMALL LETTER SHARP S
    '\xd4' # 0x00e2 -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
    '\u0143' # 0x00e3 -> LATIN CAPITAL LETTER N WITH ACUTE
    '\u0144' # 0x00e4 -> LATIN SMALL LETTER N WITH ACUTE
    '\u0148' # 0x00e5 -> LATIN SMALL LETTER N WITH CARON
    '\u0160' # 0x00e6 -> LATIN CAPITAL LETTER S WITH CARON
    '\u0161' # 0x00e7 -> LATIN SMALL LETTER S WITH CARON
    '\u0154' # 0x00e8 -> LATIN CAPITAL LETTER R WITH ACUTE
    '\xda' # 0x00e9 -> LATIN CAPITAL LETTER U WITH ACUTE
    '\u0155' # 0x00ea -> LATIN SMALL LETTER R WITH ACUTE
    '\u0170' # 0x00eb -> LATIN CAPITAL LETTER U WITH DOUBLE ACUTE
    '\xfd' # 0x00ec -> LATIN SMALL LETTER Y WITH ACUTE
    '\xdd' # 0x00ed -> LATIN CAPITAL LETTER Y WITH ACUTE
    '\u0163' # 0x00ee -> LATIN SMALL LETTER T WITH CEDILLA
    '\xb4' # 0x00ef -> ACUTE ACCENT
    '\xad' # 0x00f0 -> SOFT HYPHEN
    '\u02dd' # 0x00f1 -> DOUBLE ACUTE ACCENT
    '\u02db' # 0x00f2 -> OGONEK
    '\u02c7' # 0x00f3 -> CARON
    '\u02d8' # 0x00f4 -> BREVE
    '\xa7' # 0x00f5 -> SECTION SIGN
    '\xf7' # 0x00f6 -> DIVISION SIGN
    '\xb8' # 0x00f7 -> CEDILLA
    '\xb0' # 0x00f8 -> DEGREE SIGN
    '\xa8' # 0x00f9 -> DIAERESIS
    '\u02d9' # 0x00fa -> DOT ABOVE
    '\u0171' # 0x00fb -> LATIN SMALL LETTER U WITH DOUBLE ACUTE
    '\u0158' # 0x00fc -> LATIN CAPITAL LETTER R WITH CARON
    '\u0159' # 0x00fd -> LATIN SMALL LETTER R WITH CARON
    '\u25a0' # 0x00fe -> BLACK SQUARE
    '\xa0' # 0x00ff -> NO-BREAK SPACE
)
encoding_map = {
0x0000: 0x0000, # NULL
0x0001: 0x0001, # START OF HEADING
0x0002: 0x0002, # START OF TEXT
0x0003: 0x0003, # END OF TEXT
0x0004: 0x0004, # END OF TRANSMISSION
0x0005: 0x0005, # ENQUIRY
0x0006: 0x0006, # ACKNOWLEDGE
0x0007: 0x0007, # BELL
0x0008: 0x0008, # BACKSPACE
0x0009: 0x0009, # HORIZONTAL TABULATION
0x000a: 0x000a, # LINE FEED
0x000b: 0x000b, # VERTICAL TABULATION
0x000c: 0x000c, # FORM FEED
0x000d: 0x000d, # CARRIAGE RETURN
0x000e: 0x000e, # SHIFT OUT
0x000f: 0x000f, # SHIFT IN
0x0010: 0x0010, # DATA LINK ESCAPE
0x0011: 0x0011, # DEVICE CONTROL ONE
0x0012: 0x0012, # DEVICE CONTROL TWO
0x0013: 0x0013, # DEVICE CONTROL THREE
0x0014: 0x0014, # DEVICE CONTROL FOUR
0x0015: 0x0015, # NEGATIVE ACKNOWLEDGE
0x0016: 0x0016, # SYNCHRONOUS IDLE
0x0017: 0x0017, # END OF TRANSMISSION BLOCK
0x0018: 0x0018, # CANCEL
0x0019: 0x0019, # END OF MEDIUM
0x001a: 0x001a, # SUBSTITUTE
0x001b: 0x001b, # ESCAPE
0x001c: 0x001c, # FILE SEPARATOR
0x001d: 0x001d, # GROUP SEPARATOR
0x001e: 0x001e, # RECORD SEPARATOR
0x001f: 0x001f, # UNIT SEPARATOR
0x0020: 0x0020, # SPACE
0x0021: 0x0021, # EXCLAMATION MARK
0x0022: 0x0022, # QUOTATION MARK
0x0023: 0x0023, # NUMBER SIGN
0x0024: 0x0024, # DOLLAR SIGN
0x0025: 0x0025, # PERCENT SIGN
0x0026: 0x0026, # AMPERSAND
0x0027: 0x0027, # APOSTROPHE
0x0028: 0x0028, # LEFT PARENTHESIS
0x0029: 0x0029, # RIGHT PARENTHESIS
0x002a: 0x002a, # ASTERISK
0x002b: 0x002b, # PLUS SIGN
0x002c: 0x002c, # COMMA
0x002d: 0x002d, # HYPHEN-MINUS
0x002e: 0x002e, # FULL STOP
0x002f: 0x002f, # SOLIDUS
0x0030: 0x0030, # DIGIT ZERO
0x0031: 0x0031, # DIGIT ONE
0x0032: 0x0032, # DIGIT TWO
0x0033: 0x0033, # DIGIT THREE
0x0034: 0x0034, # DIGIT FOUR
0x0035: 0x0035, # DIGIT FIVE
0x0036: 0x0036, # DIGIT SIX
0x0037: 0x0037, # DIGIT SEVEN
0x0038: 0x0038, # DIGIT EIGHT
0x0039: 0x0039, # DIGIT NINE
0x003a: 0x003a, # COLON
0x003b: 0x003b, # SEMICOLON
0x003c: 0x003c, # LESS-THAN SIGN
0x003d: 0x003d, # EQUALS SIGN
0x003e: 0x003e, # GREATER-THAN SIGN
0x003f: 0x003f, # QUESTION MARK
0x0040: 0x0040, # COMMERCIAL AT
0x0041: 0x0041, # LATIN CAPITAL LETTER A
0x0042: 0x0042, # LATIN CAPITAL LETTER B
0x0043: 0x0043, # LATIN CAPITAL LETTER C
0x0044: 0x0044, # LATIN CAPITAL LETTER D
0x0045: 0x0045, # LATIN CAPITAL LETTER E
0x0046: 0x0046, # LATIN CAPITAL LETTER F
0x0047: 0x0047, # LATIN CAPITAL LETTER G
0x0048: 0x0048, # LATIN CAPITAL LETTER H
0x0049: 0x0049, # LATIN CAPITAL LETTER I
0x004a: 0x004a, # LATIN CAPITAL LETTER J
0x004b: 0x004b, # LATIN CAPITAL LETTER K
0x004c: 0x004c, # LATIN CAPITAL LETTER L
0x004d: 0x004d, # LATIN CAPITAL LETTER M
0x004e: 0x004e, # LATIN CAPITAL LETTER N
0x004f: 0x004f, # LATIN CAPITAL LETTER O
0x0050: 0x0050, # LATIN CAPITAL LETTER P
0x0051: 0x0051, # LATIN CAPITAL LETTER Q
0x0052: 0x0052, # LATIN CAPITAL LETTER R
0x0053: 0x0053, # LATIN CAPITAL LETTER S
0x0054: 0x0054, # LATIN CAPITAL LETTER T
0x0055: 0x0055, # LATIN CAPITAL LETTER U
0x0056: 0x0056, # LATIN CAPITAL LETTER V
0x0057: 0x0057, # LATIN CAPITAL LETTER W
0x0058: 0x0058, # LATIN CAPITAL LETTER X
0x0059: 0x0059, # LATIN CAPITAL LETTER Y
0x005a: 0x005a, # LATIN CAPITAL LETTER Z
0x005b: 0x005b, # LEFT SQUARE BRACKET
0x005c: 0x005c, # REVERSE SOLIDUS
0x005d: 0x005d, # RIGHT SQUARE BRACKET
0x005e: 0x005e, # CIRCUMFLEX ACCENT
0x005f: 0x005f, # LOW LINE
0x0060: 0x0060, # GRAVE ACCENT
0x0061: 0x0061, # LATIN SMALL LETTER A
0x0062: 0x0062, # LATIN SMALL LETTER B
0x0063: 0x0063, # LATIN SMALL LETTER C
0x0064: 0x0064, # LATIN SMALL LETTER D
0x0065: 0x0065, # LATIN SMALL LETTER E
0x0066: 0x0066, # LATIN SMALL LETTER F
0x0067: 0x0067, # LATIN SMALL LETTER G
0x0068: 0x0068, # LATIN SMALL LETTER H
0x0069: 0x0069, # LATIN SMALL LETTER I
0x006a: 0x006a, # LATIN SMALL LETTER J
0x006b: 0x006b, # LATIN SMALL LETTER K
0x006c: 0x006c, # LATIN SMALL LETTER L
0x006d: 0x006d, # LATIN SMALL LETTER M
0x006e: 0x006e, # LATIN SMALL LETTER N
0x006f: 0x006f, # LATIN SMALL LETTER O
0x0070: 0x0070, # LATIN SMALL LETTER P
0x0071: 0x0071, # LATIN SMALL LETTER Q
0x0072: 0x0072, # LATIN SMALL LETTER R
0x0073: 0x0073, # LATIN SMALL LETTER S
0x0074: 0x0074, # LATIN SMALL LETTER T
0x0075: 0x0075, # LATIN SMALL LETTER U
0x0076: 0x0076, # LATIN SMALL LETTER V
0x0077: 0x0077, # LATIN SMALL LETTER W
0x0078: 0x0078, # LATIN SMALL LETTER X
0x0079: 0x0079, # LATIN SMALL LETTER Y
0x007a: 0x007a, # LATIN SMALL LETTER Z
0x007b: 0x007b, # LEFT CURLY BRACKET
0x007c: 0x007c, # VERTICAL LINE
0x007d: 0x007d, # RIGHT CURLY BRACKET
0x007e: 0x007e, # TILDE
0x007f: 0x007f, # DELETE
0x00a0: 0x00ff, # NO-BREAK SPACE
0x00a4: 0x00cf, # CURRENCY SIGN
0x00a7: 0x00f5, # SECTION SIGN
0x00a8: 0x00f9, # DIAERESIS
0x00ab: 0x00ae, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00ac: 0x00aa, # NOT SIGN
0x00ad: 0x00f0, # SOFT HYPHEN
0x00b0: 0x00f8, # DEGREE SIGN
0x00b4: 0x00ef, # ACUTE ACCENT
0x00b8: 0x00f7, # CEDILLA
0x00bb: 0x00af, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00c1: 0x00b5, # LATIN CAPITAL LETTER A WITH ACUTE
0x00c2: 0x00b6, # LATIN CAPITAL LETTER A WITH CIRCUMFLEX
0x00c4: 0x008e, # LATIN CAPITAL LETTER A WITH DIAERESIS
0x00c7: 0x0080, # LATIN CAPITAL LETTER C WITH CEDILLA
0x00c9: 0x0090, # LATIN CAPITAL LETTER E WITH ACUTE
0x00cb: 0x00d3, # LATIN CAPITAL LETTER E WITH DIAERESIS
0x00cd: 0x00d6, # LATIN CAPITAL LETTER I WITH ACUTE
0x00ce: 0x00d7, # LATIN CAPITAL LETTER I WITH CIRCUMFLEX
0x00d3: 0x00e0, # LATIN CAPITAL LETTER O WITH ACUTE
0x00d4: 0x00e2, # LATIN CAPITAL LETTER O WITH CIRCUMFLEX
0x00d6: 0x0099, # LATIN CAPITAL LETTER O WITH DIAERESIS
0x00d7: 0x009e, # MULTIPLICATION SIGN
0x00da: 0x00e9, # LATIN CAPITAL LETTER U WITH ACUTE
0x00dc: 0x009a, # LATIN CAPITAL LETTER U WITH DIAERESIS
0x00dd: 0x00ed, # LATIN CAPITAL LETTER Y WITH ACUTE
0x00df: 0x00e1, # LATIN SMALL LETTER SHARP S
0x00e1: 0x00a0, # LATIN SMALL LETTER A WITH ACUTE
0x00e2: 0x0083, # LATIN SMALL LETTER A WITH CIRCUMFLEX
0x00e4: 0x0084, # LATIN SMALL LETTER A WITH DIAERESIS
0x00e7: 0x0087, # LATIN SMALL LETTER C WITH CEDILLA
0x00e9: 0x0082, # LATIN SMALL LETTER E WITH ACUTE
0x00eb: 0x0089, # LATIN SMALL LETTER E WITH DIAERESIS
0x00ed: 0x00a1, # LATIN SMALL LETTER I WITH ACUTE
0x00ee: 0x008c, # LATIN SMALL LETTER I WITH CIRCUMFLEX
0x00f3: 0x00a2, # LATIN SMALL LETTER O WITH ACUTE
0x00f4: 0x0093, # LATIN SMALL LETTER O WITH CIRCUMFLEX
0x00f6: 0x0094, # LATIN SMALL LETTER O WITH DIAERESIS
0x00f7: 0x00f6, # DIVISION SIGN
0x00fa: 0x00a3, # LATIN SMALL LETTER U WITH ACUTE
0x00fc: 0x0081, # LATIN SMALL LETTER U WITH DIAERESIS
0x00fd: 0x00ec, # LATIN SMALL LETTER Y WITH ACUTE
0x0102: 0x00c6, # LATIN CAPITAL LETTER A WITH BREVE
0x0103: 0x00c7, # LATIN SMALL LETTER A WITH BREVE
0x0104: 0x00a4, # LATIN CAPITAL LETTER A WITH OGONEK
0x0105: 0x00a5, # LATIN SMALL LETTER A WITH OGONEK
0x0106: 0x008f, # LATIN CAPITAL LETTER C WITH ACUTE
0x0107: 0x0086, # LATIN SMALL LETTER C WITH ACUTE
0x010c: 0x00ac, # LATIN CAPITAL LETTER C WITH CARON
0x010d: 0x009f, # LATIN SMALL LETTER C WITH CARON
0x010e: 0x00d2, # LATIN CAPITAL LETTER D WITH CARON
0x010f: 0x00d4, # LATIN SMALL LETTER D WITH CARON
0x0110: 0x00d1, # LATIN CAPITAL LETTER D WITH STROKE
0x0111: 0x00d0, # LATIN SMALL LETTER D WITH STROKE
0x0118: 0x00a8, # LATIN CAPITAL LETTER E WITH OGONEK
0x0119: 0x00a9, # LATIN SMALL LETTER E WITH OGONEK
0x011a: 0x00b7, # LATIN CAPITAL LETTER E WITH CARON
0x011b: 0x00d8, # LATIN SMALL LETTER E WITH CARON
0x0139: 0x0091, # LATIN CAPITAL LETTER L WITH ACUTE
0x013a: 0x0092, # LATIN SMALL LETTER L WITH ACUTE
0x013d: 0x0095, # LATIN CAPITAL LETTER L WITH CARON
0x013e: 0x0096, # LATIN SMALL LETTER L WITH CARON
0x0141: 0x009d, # LATIN CAPITAL LETTER L WITH STROKE
0x0142: 0x0088, # LATIN SMALL LETTER L WITH STROKE
0x0143: 0x00e3, # LATIN CAPITAL LETTER N WITH ACUTE
0x0144: 0x00e4, # LATIN SMALL LETTER N WITH ACUTE
0x0147: 0x00d5, # LATIN CAPITAL LETTER N WITH CARON
0x0148: 0x00e5, # LATIN SMALL LETTER N WITH CARON
0x0150: 0x008a, # LATIN CAPITAL LETTER O WITH DOUBLE ACUTE
0x0151: 0x008b, # LATIN SMALL LETTER O WITH DOUBLE ACUTE
0x0154: 0x00e8, # LATIN CAPITAL LETTER R WITH ACUTE
0x0155: 0x00ea, # LATIN SMALL LETTER R WITH ACUTE
0x0158: 0x00fc, # LATIN CAPITAL LETTER R WITH CARON
0x0159: 0x00fd, # LATIN SMALL LETTER R WITH CARON
0x015a: 0x0097, # LATIN CAPITAL LETTER S WITH ACUTE
0x015b: 0x0098, # LATIN SMALL LETTER S WITH ACUTE
0x015e: 0x00b8, # LATIN CAPITAL LETTER S WITH CEDILLA
0x015f: 0x00ad, # LATIN SMALL LETTER S WITH CEDILLA
0x0160: 0x00e6, # LATIN CAPITAL LETTER S WITH CARON
0x0161: 0x00e7, # LATIN SMALL LETTER S WITH CARON
0x0162: 0x00dd, # LATIN CAPITAL LETTER T WITH CEDILLA
0x0163: 0x00ee, # LATIN SMALL LETTER T WITH CEDILLA
0x0164: 0x009b, # LATIN CAPITAL LETTER T WITH CARON
0x0165: 0x009c, # LATIN SMALL LETTER T WITH CARON
0x016e: 0x00de, # LATIN CAPITAL LETTER U WITH RING ABOVE
0x016f: 0x0085, # LATIN SMALL LETTER U WITH RING ABOVE
0x0170: 0x00eb, # LATIN CAPITAL LETTER U WITH DOUBLE ACUTE
0x0171: 0x00fb, # LATIN SMALL LETTER U WITH DOUBLE ACUTE
0x0179: 0x008d, # LATIN CAPITAL LETTER Z WITH ACUTE
0x017a: 0x00ab, # LATIN SMALL LETTER Z WITH ACUTE
0x017b: 0x00bd, # LATIN CAPITAL LETTER Z WITH DOT ABOVE
0x017c: 0x00be, # LATIN SMALL LETTER Z WITH DOT ABOVE
0x017d: 0x00a6, # LATIN CAPITAL LETTER Z WITH CARON
0x017e: 0x00a7, # LATIN SMALL LETTER Z WITH CARON
0x02c7: 0x00f3, # CARON
0x02d8: 0x00f4, # BREVE
0x02d9: 0x00fa, # DOT ABOVE
0x02db: 0x00f2, # OGONEK
0x02dd: 0x00f1, # DOUBLE ACUTE ACCENT
0x2500: 0x00c4, # BOX DRAWINGS LIGHT HORIZONTAL
0x2502: 0x00b3, # BOX DRAWINGS LIGHT VERTICAL
0x250c: 0x00da, # BOX DRAWINGS LIGHT DOWN AND RIGHT
0x2510: 0x00bf, # BOX DRAWINGS LIGHT DOWN AND LEFT
0x2514: 0x00c0, # BOX DRAWINGS LIGHT UP AND RIGHT
0x2518: 0x00d9, # BOX DRAWINGS LIGHT UP AND LEFT
0x251c: 0x00c3, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
0x2524: 0x00b4, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
0x252c: 0x00c2, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
0x2534: 0x00c1, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
0x253c: 0x00c5, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
0x2550: 0x00cd, # BOX DRAWINGS DOUBLE HORIZONTAL
0x2551: 0x00ba, # BOX DRAWINGS DOUBLE VERTICAL
0x2554: 0x00c9, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
0x2557: 0x00bb, # BOX DRAWINGS DOUBLE DOWN AND LEFT
0x255a: 0x00c8, # BOX DRAWINGS DOUBLE UP AND RIGHT
0x255d: 0x00bc, # BOX DRAWINGS DOUBLE UP AND LEFT
0x2560: 0x00cc, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
0x2563: 0x00b9, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
0x2566: 0x00cb, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
0x2569: 0x00ca, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
0x256c: 0x00ce, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
0x2580: 0x00df, # UPPER HALF BLOCK
0x2584: 0x00dc, # LOWER HALF BLOCK
0x2588: 0x00db, # FULL BLOCK
0x2591: 0x00b0, # LIGHT SHADE
0x2592: 0x00b1, # MEDIUM SHADE
0x2593: 0x00b2, # DARK SHADE
0x25a0: 0x00fe, # BLACK SQUARE
}
>>>>>>> b875702c9c06ab5012e52ff4337439b03918f453
|
""" Setup file """
import os
from setuptools import find_packages, setup
HERE = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(HERE, "README.rst")) as f:
README = f.read()
with open(os.path.join(HERE, "CHANGES.rst")) as f:
CHANGES = f.read()
REQUIREMENTS_TEST = open(os.path.join(HERE, "requirements_test.txt")).readlines()
REQUIREMENTS = [
"botocore>=0.89.0",
]
if __name__ == "__main__":
setup(
name="dynamo3",
version="1.0.0",
description="Python 3 compatible library for DynamoDB",
long_description=README + "\n\n" + CHANGES,
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
],
author="Steven Arcangeli",
author_email="stevearc@stevearc.com",
url="http://github.com/stevearc/dynamo3",
keywords="aws dynamo dynamodb",
include_package_data=True,
packages=find_packages(exclude=("tests",)),
license="MIT",
entry_points={
"console_scripts": [
"dynamodb-local = dynamo3.testing:run_dynamo_local",
],
"nose.plugins": [
"dynamolocal=dynamo3.testing:DynamoLocalPlugin",
],
},
python_requires=">=3.6",
install_requires=REQUIREMENTS,
tests_require=REQUIREMENTS + REQUIREMENTS_TEST,
)
|
import Config
import Database
import atexit, os, time
from flask import Flask
from concurrent.futures import ThreadPoolExecutor

from classes import CRONTask

# Cap concurrent view-generation runs at 5 background workers.
executor = ThreadPoolExecutor(5)

app = Flask(__name__)


@app.route("/")
def index():
    """Health-check endpoint."""
    return "View Generator Service is Active!"


@app.route("/View")
def view():
    """Kick off view generation in the background and return immediately."""
    executor.submit(runTask)
    return ""


def runTask():
    """Run the CRON task to completion and clean up its PID bookkeeping."""
    cron = CRONTask.CRONTask()
    cron.run()
    cron.killPID()
    # BUGFIX: the original ended with sys.exit(0) although `sys` was never
    # imported; the resulting NameError was silently captured in the Future
    # returned by executor.submit(). A worker thread cannot terminate the
    # process via sys.exit() anyway, so the call is removed.


if __name__ == '__main__':
    app.run(debug=True, port=Config.PORT, host=Config.HOST)
|
from __future__ import unicode_literals
from django.db import migrations, models
import oktansite.models
class Migration(migrations.Migration):
    """Add an optional ``image`` field to the ``news`` model.

    The upload destination is computed at save time by
    ``oktansite.models.get_upload_path_news_attachment``.
    """

    dependencies = [
        ('oktansite', '0004_news_attachment'),
    ]

    operations = [
        migrations.AddField(
            model_name='news',
            name='image',
            # null=True keeps existing news rows valid without an image.
            field=models.ImageField(null=True, upload_to=oktansite.models.get_upload_path_news_attachment),
        ),
    ]
|
from Models.FeatureProcessing import *
from keras.models import Sequential
from keras.layers import Activation, Dense, LSTM
from keras.optimizers import Adam, SGD
import numpy as np
import abc
from ClassificationModule import ClassificationModule
class descriptionreponamelstm(ClassificationModule):
    """A basic LSTM neural network classifying a repository from its
    description plus its name, read character by character."""
    def __init__(self, num_hidden_layers=3):
        ClassificationModule.__init__(self, "Description and reponame LSTM", "A LSTM reading the description and reponame character by character")
        hidden_size = 300
        # Input sequence length in characters; longer inputs are handled by
        # lstmEncode(..., maxlen=...) — presumably padded/truncated, confirm.
        self.maxlen = 300

        # Set output_size
        self.output_size = 7 # Hardcoded for 7 classes

        model = Sequential()

        # Maximum of self.maxlen characters allowed, each in one-hot-encoded array
        model.add(LSTM(hidden_size, input_shape=(self.maxlen, getLstmCharLength())))
        # Stack of fully connected layers on top of the LSTM output.
        for _ in range(num_hidden_layers):
            model.add(Dense(hidden_size))
        model.add(Dense(self.output_size))
        model.add(Activation('softmax'))
        model.compile(loss='categorical_crossentropy',
                    optimizer=SGD(),
                    metrics=['accuracy'])
        self.model = model
        print "\t-", self.name

    def resetAllTraining(self):
        """Reset classification module to status before training"""
        resetWeights(self.model)

    def trainOnSample(self, sample, nb_epoch=1, shuffle=True, verbose=True):
        """Train (incrementally) on one sample. Possibly also with a certain
        amount of old data, so that overfitting on the new data is avoided."""
        readme_vec = self.formatInputData(sample)
        label_index = getLabelIndex(sample)
        label_one_hot = np.expand_dims(oneHot(label_index), axis=0) # [1, 0, 0, ..] -> [[1, 0, 0, ..]] Necessary for keras
        self.model.fit(readme_vec, label_one_hot, nb_epoch=nb_epoch, shuffle=shuffle, verbose=verbose)

    def train(self, samples, nb_epoch=200, shuffle=True, verbose=True):
        """Train with a list of samples. Class weights counteract label
        imbalance; nb_epoch/shuffle/verbose are passed through to keras."""
        train_samples = []
        train_lables = []
        for sample in samples:
            formatted_sample = self.formatInputData(sample)[0].tolist()
            train_samples.append(formatted_sample)
            train_lables.append(oneHot(getLabelIndex(sample)))
        train_lables = np.asarray(train_lables)
        train_result = self.model.fit(train_samples, train_lables, nb_epoch=nb_epoch, shuffle=shuffle, verbose=verbose, class_weight=getClassWeights())
        self.isTrained = True
        return train_result

    def predictLabel(self, sample):
        """Return the label index the classifier would assign to the sample.
        Falls back to 0 while the model is untrained."""
        if not self.isTrained:
            return 0
        sample = self.formatInputData(sample)
        return np.argmax(self.model.predict(sample))

    def predictLabelAndProbability(self, sample):
        """Return the probability the module assignes each label"""
        if not self.isTrained:
            return [0, 0, 0, 0, 0, 0, 0, 0]
        sample = self.formatInputData(sample)
        prediction = self.model.predict(sample)[0]
        return [np.argmax(prediction)] + list(prediction) # [0] So 1-D array is returned

    def formatInputData(self, sample):
        """Extract description and transform to vector"""
        sd = getDescription(sample)
        sd += getName(sample)
        # Returns numpy array which contains 1 array with features
        return np.expand_dims(lstmEncode(sd, maxlen=self.maxlen), axis=0)
|
__author__ = 'sekely'

'''
we are using variables almost everywhere in the code.
variables are used to store results, calculations and many more.
think of it as the famous "x" from high school:
x = 5, right?
the only thing is, that in Python "x" can store anything
'''

# numbers: x holds an int, and expressions using it produce new values
x = 5
y = x + 3
print(y)  # -> 8

# strings: the very same names can be rebound to a different type,
# and "+" now means concatenation instead of addition
x = 'hello'
y = ' '
z = 'world!'
w = x + y + z
print(w)  # -> hello world!
|
import os
import requests
import sqlite3
def get_card(browser):
    """Scrape one Weiß Schwarz card page already loaded in ``browser``.

    Reads the card's attribute table, downloads the card image into
    images/<set>/, and inserts one row into the ``cards`` table of
    cards.sqlite3.

    Parameters
    ----------
    browser : selenium WebDriver currently showing a card detail page.
    """
    # Every card attribute is a <td> cell of the single "status" table.
    attributes = browser.find_elements_by_xpath('//table[@class="status"]/tbody/tr/td')

    image = attributes[0].find_element_by_xpath('./img').get_attribute('src')
    if attributes[1].find_element_by_xpath('./span[@class="kana"]').text:
        card_name = attributes[1].find_element_by_xpath('./span[@class="kana"]').text
    else:
        card_name = None
    # Empty text cells are normalized to None throughout.
    card_no = attributes[2].text if attributes[2].text else None
    rarity = attributes[3].text if attributes[3].text else None
    expansion = attributes[4].text if attributes[4].text else None
    # The card side is encoded as an icon (w.gif = Weiß, s.gif = Schwarz).
    if attributes[5].find_element_by_xpath('./img').get_attribute("src") == "http://ws-tcg.com/en/cardlist/partimages/w.gif":
        side = "Weiß"
    elif attributes[5].find_element_by_xpath('./img').get_attribute("src") == "http://ws-tcg.com/en/cardlist/partimages/s.gif":
        side = "Schwarz"
    else:
        side = None
    card_type = attributes[6].text if attributes[6].text else None
    # Color is likewise encoded as a yellow/green/red/blue icon.
    if attributes[7].find_element_by_xpath('./img').get_attribute("src") == "http://ws-tcg.com/en/cardlist/partimages/yellow.gif":
        color = "Yellow"
    elif attributes[7].find_element_by_xpath('./img').get_attribute("src") == "http://ws-tcg.com/en/cardlist/partimages/green.gif":
        color = "Green"
    elif attributes[7].find_element_by_xpath('./img').get_attribute("src") == "http://ws-tcg.com/en/cardlist/partimages/red.gif":
        color = "Red"
    elif attributes[7].find_element_by_xpath('./img').get_attribute("src") == "http://ws-tcg.com/en/cardlist/partimages/blue.gif":
        color = "Blue"
    else:
        color = None
    level = attributes[8].text if attributes[8].text else None
    cost = attributes[9].text if attributes[9].text else None
    power = attributes[10].text if attributes[10].text else None
    # Soul is displayed as a row of soul icons; count them.
    soul = len(attributes[11].find_elements_by_xpath('./img[contains(@src, "http://ws-tcg.com/en/cardlist/partimages/soul.gif")]'))
    # NOTE(review): attributes[12] is skipped — presumably a cell this scraper
    # does not need; confirm against the current page layout.
    special_attribute = attributes[13].text if attributes[13].text else None
    text = attributes[14].text if attributes[14].text else None
    flavor_text = attributes[15].text if attributes[15].text else None

    # Save the card image under images/<set prefix>/<card_no>.jpg.
    if not os.path.exists("images"):
        os.makedirs("images")
    if not os.path.exists("images/" + card_no.split("/")[0]):
        os.makedirs("images/" + card_no.split("/")[0])
    r = requests.get(image, stream=True)
    if r.status_code == 200:
        with open("images/" + card_no + ".jpg", 'wb') as f:
            for chunk in r:
                f.write(chunk)

    card = (card_name, card_no, rarity, expansion, side, card_type, color, level, cost, power, soul,
            special_attribute, text, flavor_text)
    connection = sqlite3.connect('cards.sqlite3')
    cursor = connection.cursor()
    cursor.execute('INSERT INTO cards (name, no, rarity, expansion, side, type, color, level, cost, power, soul,'
                   'special_attribute, text, flavor_text) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?,?, ?)', card)
    connection.commit()
    connection.close()
|
from brightcove.core import APIObject, Field, DateTimeField, ListField, EnumField
from brightcove.objects import ItemCollection, enum
# Closed value sets used by the API objects defined below.
ChannelNameEnum = enum('ten', 'eleven', 'one')
PlaylistTypeEnum = enum('full_episodes', 'web_extras', 'news', 'season', 'week', 'category', 'special', 'preview')
MediaDeliveryEnum = enum('default', 'http', 'http_ios')
class EnumNumField(Field):
    """A Field mapping a numeric index to the name of an enum member.

    ``to_python`` turns an integer position into the member name found at
    that position in ``enum_cls._fields``.
    """
    def __init__(self, enum_cls, help=None):
        self.help = help
        self.enum_cls = enum_cls

    def to_python(self, value):
        # Position i in _fields corresponds to numeric value i.
        for i, field in enumerate(self.enum_cls._fields):
            if i == value:
                return field
        raise Exception('Invalid Enum: %s' % value)

    def from_python(self, value):
        # NOTE(review): this indexes _fields by ``value``. If callers pass the
        # member *name* (the inverse of to_python), this would need
        # ``self.enum_cls._fields.index(value)`` — confirm the intended input.
        return self.enum_cls._fields[value]
class Playlist(APIObject):
    """A named playlist; ``type`` is one of PlaylistTypeEnum."""
    _fields = ['name', 'type', 'season', 'week', 'query']
    type = EnumField(PlaylistTypeEnum)

    def __repr__(self):
        return "<Playlist name='%s'>" % self.name
class Show(APIObject):
    """A show on one channel, carrying artwork links and its playlists."""
    _fields = ['showName', 'channelName', 'videoLink', 'mobileLink', 'logo', 'fanart', 'playlists']
    playlists = ListField(Playlist)
    channelName = EnumField(ChannelNameEnum)

    def __repr__(self):
        return "<Show name='%s'>" % self.showName
class AMFRendition(APIObject):
    """One delivery rendition of a video (codec, bitrate, frame size)."""
    _fields = ['defaultURL', 'audioOnly', 'mediaDeliveryType', 'encodingRate',
               'frameHeight', 'frameWidth', 'size',
               'videoCodec', 'videoContainer']
    mediaDeliveryType = EnumNumField(MediaDeliveryEnum)

    def __repr__(self):
        return "<Rendition bitrate='%s' type='%s' frameSize='%sx%s'>" % (
            self.encodingRate, self.mediaDeliveryType,
            self.frameWidth, self.frameHeight)
class ShowItemCollection(ItemCollection):
    """Paged collection of Show objects."""
    items = ListField(Show)
    _item_class = Show
class PlaylistItemCollection(ItemCollection):
    """Paged collection of Playlist objects."""
    items = ListField(Playlist)
    _item_class = Playlist
class MediaRenditionItemCollection(ItemCollection):
    """Paged collection of AMFRendition objects."""
    items = ListField(AMFRendition)
    _item_class = AMFRendition
|
from Bio import SeqIO
def get_proteins_for_db(fastafn, fastadelim, genefield):
    """Runs through fasta file and returns proteins accession nrs, sequences
    and evidence levels for storage in lookup DB. Duplicate accessions in
    fasta are accepted and removed by keeping only the last one.
    """
    # SeqIO.index maps accession -> record; each record is paired once with
    # its detected type ('swiss', 'ensembl', or False).
    records = {acc: (rec, get_record_type(rec)) for acc, rec in
               SeqIO.index(fastafn, 'fasta').items()}
    # The following are lazy generators over the shared records dict.
    proteins = ((x,) for x in records.keys())
    sequences = ((acc, str(rec.seq)) for acc, (rec, rtype) in records.items())
    # Descriptions only exist for typed (swiss/ensembl) records.
    desc = ((acc, get_description(rec, rtype)) for acc, (rec, rtype) in records.items() if rtype)
    evid = ((acc, get_uniprot_evidence_level(rec, rtype)) for acc, (rec, rtype) in
            records.items())
    ensgs = [(get_ensg(rec), acc) for acc, (rec, rtype) in records.items()
             if rtype == 'ensembl']
    def sym_out():
        # Symbols for typed records, then delimiter-derived genes for
        # untyped records whose description contains the delimiter.
        symbols = ((get_symbol(rec, rtype, fastadelim, genefield), acc) for
                   acc, (rec, rtype) in records.items() if rtype)
        othergene = ((get_other_gene(rec, fastadelim, genefield), acc) for acc, (rec, rtype) in records.items()
                     if not rtype and fastadelim and fastadelim in rec.description)
        yield from symbols
        yield from othergene
    return proteins, sequences, desc, evid, ensgs, [x for x in sym_out()]
def parse_fasta(fn):
    """Yield SeqRecord objects parsed from the fasta file at path ``fn``."""
    with open(fn) as handle:
        yield from SeqIO.parse(handle, 'fasta')
def get_record_type(record):
    """Classify a fasta record as 'swiss', 'ensembl', or False otherwise.

    Any decoy marker is stripped from the record ID first, so decoy entries
    are classified like their target counterparts.
    """
    decoy_marker = get_decoy_mod_string(record.id)
    test_name = record.id
    if decoy_marker is not None:
        test_name = record.id.replace(decoy_marker, '')
    if test_name.split('|')[0] in ('sp', 'tr'):
        return 'swiss'
    if test_name.startswith('ENS'):
        return 'ensembl'
    return False
def get_decoy_mod_string(protein):
    """Return the decoy marker substring found in a protein ID.

    Checks, in order of specificity: '_<mod>' suffix, '<mod>' suffix,
    '<mod>_' prefix, '<mod>' prefix. Returns None when no marker applies.
    """
    known_markers = ['tryp_reverse', 'reverse', 'decoy', 'random', 'shuffle']
    for marker in known_markers:
        if marker not in protein:
            continue
        suffixed = '_{}'.format(marker)
        prefixed = '{}_'.format(marker)
        if protein.endswith(suffixed):
            return suffixed
        if protein.endswith(marker):
            return marker
        if protein.startswith(prefixed):
            return prefixed
        if protein.startswith(marker):
            return marker
def get_description(record, rectype):
    """Return the human-readable description from a fasta header.

    ENSEMBL headers carry a 'description:...' field running to the end of
    the line; swissprot headers carry free text up to the first KEY=value
    token. Returns 'NA' when an ENSEMBL header has no description field.
    """
    if rectype == 'ensembl':
        tokens = [tok.split(':') for tok in record.description.split()]
        hits = [ix for ix, tok in enumerate(tokens) if tok[0] == 'description']
        if not hits:
            return 'NA'
        # Re-join everything from the description field onward, then drop
        # the leading 'description:' prefix (12 characters).
        rebuilt = ' '.join(':'.join(tok) for tok in tokens[hits[0]:])
        return rebuilt[12:]
    elif rectype == 'swiss':
        words = []
        for word in record.description.split()[1:]:
            if len(word.split('=')) > 1:
                break
            words.append(word)
        return ' '.join(words)
def get_other_gene(record, fastadelim, genefield):
    """Return the gene column from a delimiter-separated fasta description."""
    columns = record.description.split(fastadelim)
    return columns[genefield]
def get_genes_pickfdr(fastafn, outputtype, fastadelim, genefield):
    """Called by protein FDR module for both ENSG and e.g. Uniprot"""
    for record in parse_fasta(fastafn):
        rectype = get_record_type(record)
        if outputtype == 'ensg' and rectype == 'ensembl':
            yield get_ensg(record)
        elif outputtype == 'genename':
            yield get_symbol(record, rectype, fastadelim, genefield)
def get_ensg(record):
    """Extract the ENSG gene identifier from an ENSEMBL fasta header.

    Raises RuntimeError when no 'gene:<id>' field is present.
    """
    for token in record.description.split():
        parts = token.split(':')
        if parts[0] == 'gene' and len(parts) == 2:
            return parts[1]
    raise RuntimeError('ENSEMBL detected but cannot find gene ENSG in fasta')
def get_symbol(record, rectype, fastadelim, genefield):
    """Return the gene symbol for a fasta record, or 'NA' when absent.

    ENSEMBL headers use 'gene_symbol:<sym>', swissprot uses 'GN=<sym>';
    untyped records fall back to a delimiter/field split when configured.
    """
    if rectype == 'ensembl':
        tokens = [tok.split(':') for tok in record.description.split()]
        matches = [t[1] for t in tokens if t[0] == 'gene_symbol' and len(t) == 2]
    elif rectype == 'swiss':
        tokens = [tok.split('=') for tok in record.description.split()]
        matches = [t[1] for t in tokens if t[0] == 'GN' and len(t) == 2]
    elif fastadelim and fastadelim in record.description and genefield:
        return record.description.split(fastadelim)[genefield]
    else:
        return 'NA'
    if matches:
        return matches[0]
    return 'NA'
def get_uniprot_evidence_level(record, rtype):
    """Return the uniprot protein-existence evidence for a fasta header.

    Uniprot levels run 1 (best) to 5; the value returned is 5 - level so
    that a *higher* number still means better evidence when sorting.
    Non-swissprot records, and records without a PE field, yield -1.
    """
    if rtype != 'swiss':
        return -1
    for token in record.description.split():
        parts = token.split('=')
        if len(parts) == 2 and parts[0] == 'PE':
            return 5 - int(parts[1])
    return -1
|
'''
Test BLEUScore metric against reference
'''
from neon.transforms.cost import BLEUScore
def test_bleuscore():
    """Check BLEU-1..BLEU-4 of two candidate sentences against fixed
    reference scores (compared at one-decimal precision)."""
    # dataset with two sentences
    sentences = ["a quick brown fox jumped",
                 "the rain in spain falls mainly on the plains"]
    references = [["a fast brown fox jumped",
                   "a quick brown fox vaulted",
                   "a rapid fox of brown color jumped",
                   "the dog is running on the grass"],
                  ["the precipitation in spain falls on the plains",
                   "spanish rain falls for the most part on the plains",
                   "the rain in spain falls in the plains most of the time",
                   "it is raining today"]]

    # reference scores for the given set of reference sentences
    bleu_score_references = [92.9, 88.0, 81.5, 67.1] # bleu1, bleu2, bleu3, bleu4

    # compute scores; calling the metric populates bleu_metric.bleu_n
    bleu_metric = BLEUScore()
    bleu_metric(sentences, references)

    # check against references
    for score, reference in zip(bleu_metric.bleu_n, bleu_score_references):
        assert round(score, 1) == reference

if __name__ == '__main__':
    test_bleuscore()
|
from .grant import Grant
from ..endpoint import AuthorizationEndpoint
class ImplicitGrant(Grant):
    """OAuth 2 implicit grant (RFC 6749, section 4.2).

    Access tokens are obtained directly from the authorization endpoint —
    no refresh tokens are issued. The flow is optimized for public clients
    operating a known redirection URI, typically browser-based scripts: the
    authorization server redirects the user-agent back to the client with
    the access token carried in the URI fragment.
    """

    def get_redirection_uri(self, expires_in):
        """Build the redirection URI that delivers the access token.

        Arguments:
            expires_in -- lifetime, in seconds, of the issued access token.
        """
        endpoint = AuthorizationEndpoint(self._server, self._request, self._client)
        self._authorization_endpoint = endpoint
        return self._authorization_endpoint.implicit(expires_in)
|
from framework import do_exit, get_globals, main
def do_work():
    """Callback invoked by framework.main().

    Pulls ``g_test_import`` out of the framework's globals dict and mirrors
    both the dict and the value into this module's globals, printing each
    step for test verification.
    """
    global g_test_import
    global globals1
    print("do_work")
    globals1 = get_globals()
    g_test_import = globals1["g_test_import"]
    print("do_work: g_test_import = %s" % str(g_test_import))

# Hand control to the framework, which runs do_work (and do_exit handling).
main(do_work)
|
__revision__ = "test/Execute.py rel_2.5.1:3735:9dc6cee5c168 2016/11/03 14:02:02 bdbaddog"
"""
Test the Execute() function for executing actions directly.
"""
import TestSCons
_python_ = TestSCons._python_
test = TestSCons.TestSCons()
test.write('my_copy.py', """\
import sys
open(sys.argv[2], 'wb').write(open(sys.argv[1], 'rb').read())
try:
exitval = int(sys.argv[3])
except IndexError:
exitval = 0
sys.exit(exitval)
""")
test.write('SConstruct', """\
Execute(r'%(_python_)s my_copy.py a.in a.out')
Execute(Action(r'%(_python_)s my_copy.py b.in b.out'))
env = Environment(COPY = 'my_copy.py')
env.Execute(r'%(_python_)s my_copy.py c.in c.out')
env.Execute(Action(r'%(_python_)s my_copy.py d.in d.out'))
v = env.Execute(r'%(_python_)s $COPY e.in e.out')
assert v == 0, v
v = env.Execute(Action(r'%(_python_)s $COPY f.in f.out'))
assert v == 0, v
v = env.Execute(r'%(_python_)s $COPY g.in g.out 1')
assert v == 1, v
v = env.Execute(Action(r'%(_python_)s $COPY h.in h.out 2'))
assert v == 2, v
import shutil
Execute(lambda target, source, env: shutil.copy('i.in', 'i.out'))
Execute(Action(lambda target, source, env: shutil.copy('j.in', 'j.out')))
env.Execute(lambda target, source, env: shutil.copy('k.in', 'k.out'))
env.Execute(Action(lambda target, source, env: shutil.copy('l.in', 'l.out')))
Execute(Copy('m.out', 'm.in'))
Execute(Copy('nonexistent.out', 'nonexistent.in'))
""" % locals())
test.write('a.in', "a.in\n")
test.write('b.in', "b.in\n")
test.write('c.in', "c.in\n")
test.write('d.in', "d.in\n")
test.write('e.in', "e.in\n")
test.write('f.in', "f.in\n")
test.write('g.in', "g.in\n")
test.write('h.in', "h.in\n")
test.write('i.in', "i.in\n")
test.write('j.in', "j.in\n")
test.write('k.in', "k.in\n")
test.write('l.in', "l.in\n")
test.write('m.in', "m.in\n")
import sys
if sys.platform == 'win32':
expect = r"""scons: \*\*\* Error 1
scons: \*\*\* Error 2
scons: \*\*\* nonexistent.in/\*\.\*: (The system cannot find the path specified|Das System kann den angegebenen Pfad nicht finden)"""
else:
expect = r"""scons: \*\*\* Error 1
scons: \*\*\* Error 2
scons: \*\*\* nonexistent\.in: No such file or directory"""
test.run(arguments = '.', stdout = None, stderr = None)
test.must_contain_all_lines(test.stderr(), expect.splitlines(), find=TestSCons.search_re)
test.must_match('a.out', "a.in\n")
test.must_match('b.out', "b.in\n")
test.must_match('c.out', "c.in\n")
test.must_match('d.out', "d.in\n")
test.must_match('e.out', "e.in\n")
test.must_match('f.out', "f.in\n")
test.must_match('g.out', "g.in\n")
test.must_match('h.out', "h.in\n")
test.must_match('i.out', "i.in\n")
test.must_match('j.out', "j.in\n")
test.must_match('k.out', "k.in\n")
test.must_match('l.out', "l.in\n")
test.must_match('m.out', "m.in\n")
test.pass_test()
|
from django import forms
from django.core.validators import validate_email
from django.db.models import Q
from django.utils.translation import ugettext_lazy as _
from .utils import get_user_model
class PasswordRecoveryForm(forms.Form):
    """Resolve a typed-in username and/or email address to a user account.

    Constructor keyword arguments:
        case_sensitive -- match the identifier exactly (default True).
        search_fields  -- accepted identifiers: ('username',), ('email',)
                          or ('username', 'email') (the default).
    """
    username_or_email = forms.CharField()

    error_messages = {
        'not_found': _("Sorry, this user doesn't exist."),
    }

    def __init__(self, *args, **kwargs):
        self.case_sensitive = kwargs.pop('case_sensitive', True)
        search_fields = kwargs.pop('search_fields', ('username', 'email'))
        super(PasswordRecoveryForm, self).__init__(*args, **kwargs)

        # Validate the search_fields configuration up front. The message now
        # names both supported fields (it previously claimed only email was
        # supported, contradicting the check below).
        message = ("No other fields than 'username' and 'email' "
                   "are supported by default")
        if len(search_fields) not in (1, 2):
            raise ValueError(message)
        for field in search_fields:
            if field not in ['username', 'email']:
                raise ValueError(message)

        labels = {
            'username': _('Username'),
            'email': _('Email'),
            'both': _('Username or Email'),
        }
        User = get_user_model()  # noqa
        # label_key selects both the field label and the lookup strategy
        # (get_user_by_<label_key>) used during cleaning.
        if getattr(User, 'USERNAME_FIELD', 'username') == 'email':
            self.label_key = 'email'
        elif len(search_fields) == 1:
            self.label_key = search_fields[0]
        else:
            self.label_key = 'both'
        self.fields['username_or_email'].label = labels[self.label_key]

    def clean_username_or_email(self):
        """Resolve the identifier and stash the user in cleaned_data['user'].

        Raises forms.ValidationError when no matching user exists.
        """
        username = self.cleaned_data['username_or_email']
        cleaner = getattr(self, 'get_user_by_%s' % self.label_key)
        self.cleaned_data['user'] = cleaner(username)
        return username

    def get_user_by_username(self, username):
        """Fetch a user by username, honoring case_sensitive."""
        key = 'username__%sexact' % ('' if self.case_sensitive else 'i')
        User = get_user_model()
        try:
            user = User._default_manager.get(**{key: username})
        except User.DoesNotExist:
            raise forms.ValidationError(self.error_messages['not_found'],
                                        code='not_found')
        return user

    def get_user_by_email(self, email):
        """Fetch a user by email address after validating its format."""
        validate_email(email)
        key = 'email__%sexact' % ('' if self.case_sensitive else 'i')
        User = get_user_model()
        try:
            user = User._default_manager.get(**{key: email})
        except User.DoesNotExist:
            raise forms.ValidationError(self.error_messages['not_found'],
                                        code='not_found')
        return user

    def get_user_by_both(self, username):
        """Fetch a user matching either the username or the email column."""
        key = '__%sexact'
        key = key % '' if self.case_sensitive else key % 'i'

        # Named helper instead of the original assigned lambda (PEP 8 E731).
        def make_filter(field):
            return Q(**{field + key: username})

        filters = make_filter('username') | make_filter('email')
        User = get_user_model()
        try:
            user = User._default_manager.get(filters)
        except User.DoesNotExist:
            raise forms.ValidationError(self.error_messages['not_found'],
                                        code='not_found')
        except User.MultipleObjectsReturned:
            raise forms.ValidationError(_("Unable to find user."))
        return user
class PasswordResetForm(forms.Form):
    """Set a new password for ``user``; the value must be entered twice."""
    password1 = forms.CharField(
        label=_('New password'),
        widget=forms.PasswordInput,
    )
    password2 = forms.CharField(
        label=_('New password (confirm)'),
        widget=forms.PasswordInput,
    )

    error_messages = {
        'password_mismatch': _("The two passwords didn't match."),
    }

    def __init__(self, *args, **kwargs):
        self.user = kwargs.pop('user')
        super(PasswordResetForm, self).__init__(*args, **kwargs)

    def clean_password2(self):
        """Ensure both password entries are identical."""
        first = self.cleaned_data.get('password1', '')
        second = self.cleaned_data['password2']
        if first != second:
            raise forms.ValidationError(
                self.error_messages['password_mismatch'],
                code='password_mismatch')
        return second

    def save(self, commit=True):
        """Hash the new password on the user; persist it when commit=True."""
        self.user.set_password(self.cleaned_data['password1'])
        if commit:
            manager = get_user_model()._default_manager
            manager.filter(pk=self.user.pk).update(
                password=self.user.password,
            )
        return self.user
|
from bioscrape.inference import DeterministicLikelihood as DLL
from bioscrape.inference import StochasticTrajectoriesLikelihood as STLL
from bioscrape.inference import StochasticTrajectories
from bioscrape.inference import BulkData
import warnings
import numpy as np
class PIDInterface():
    '''
    PID Interface : Parameter identification interface.
    Super class to create parameter identification (PID) interfaces. Two PID interfaces currently implemented:
    Deterministic and Stochastic inference using time-series data.
    To add a new PIDInterface - simply add a new subclass of this parent class with your desired
    log-likelihood functions. You can even have your own check_prior function in that class if you do not
    prefer to use the built in priors with this package.

    Convention used throughout: an out-of-support parameter value makes the
    prior return np.inf (a non-finite value); callers test the result with
    np.isfinite() and map it to -np.inf themselves.
    '''
    def __init__(self, params_to_estimate, M, prior):
        '''
        Parent class for all PID interfaces.
        Arguments:
        * `params_to_estimate` : List of parameter names to be estimated
        * `M` : The bioscrape Model object to use for inference
        * `prior` : A dictionary specifying prior distribution.
        Two built-in prior functions are `uniform_prior` and `gaussian_prior`.
        Each prior has its own syntax for accepting the distribution parameters in the dictionary.
        New priors may be added. The suggested format for prior dictionaries:
        prior_dict = {'parameter_name': ['prior_name', prior_distribution_parameters]}
        For built-in uniform prior, use {'parameter_name':['uniform', lower_bound, upper_bound]}
        For built-in gaussian prior, use {'parameter_name':['gaussian', mean, standard_deviation, probability threshold]}
        New PID interfaces can be added by creating child classes of PIDInterface class as shown for
        Built-in PID interfaces : `StochasticInference` and `DeterministicInference`
        '''
        self.params_to_estimate = params_to_estimate
        self.M = M
        self.prior = prior
        return

    def check_prior(self, params_dict):
        '''
        Compute the total log prior probability for `params_dict`
        ({parameter_name: value}) by dispatching each parameter to the prior
        named in self.prior. Returns np.inf when any parameter is out of
        support. To add new prior functions: simply add a new function similar
        to ones that exist and then call it here.
        '''
        lp = 0.0
        for key, value in params_dict.items():
            # A 'positive' tag anywhere in the prior spec rejects negative values.
            if 'positive' in self.prior[key] and value < 0:
                return np.inf
            prior_type = self.prior[key][0]
            if prior_type == 'uniform':
                lp += self.uniform_prior(key, value)
            elif prior_type == 'gaussian':
                lp += self.gaussian_prior(key, value)
            elif prior_type == 'exponential':
                lp += self.exponential_prior(key, value)
            elif prior_type == 'gamma':
                lp += self.gamma_prior(key, value)
            elif prior_type == 'log-uniform':
                lp += self.log_uniform_prior(key, value)
            elif prior_type == 'log-gaussian':
                lp += self.log_gaussian_prior(key, value)
            elif prior_type == 'beta':
                lp += self.beta_prior(key, value)
            elif prior_type == 'custom':
                # The last element in the prior dictionary must be a callable function
                # The callable function should have the following signature :
                # Arguments: param_name (str), param_value(float)
                # Returns: log prior probability (float or numpy inf)
                custom_function = self.prior[key][-1]
                lp += custom_function(key, value)
            else:
                raise ValueError('Prior type undefined.')
        return lp

    def uniform_prior(self, param_name, param_value):
        '''
        Check if given param_value is valid according to the prior distribution.
        Returns np.inf if the param_value is outside the prior range and the
        (constant) log density log(1/(upper - lower)) if it is inside.
        param_name is used to look for the parameter in the prior dictionary.
        '''
        prior_dict = self.prior
        if prior_dict is None:
            raise ValueError('No prior found')
        lower_bound = prior_dict[param_name][1]
        upper_bound = prior_dict[param_name][2]
        if param_value > upper_bound or param_value < lower_bound:
            return np.inf
        return np.log(1 / (upper_bound - lower_bound))

    def gaussian_prior(self, param_name, param_value):
        '''
        Check if given param_value is valid according to the prior distribution.
        Returns the log prior probability or np.inf if the param_value is invalid.
        '''
        prior_dict = self.prior
        if prior_dict is None:
            raise ValueError('No prior found')
        mu = prior_dict[param_name][1]
        sigma = prior_dict[param_name][2]
        if sigma < 0:
            raise ValueError('The standard deviation must be positive.')
        # Using probability density function for normal distribution
        # Using scipy.stats.norm has overhead that affects speed up to 2x
        prob = 1 / (np.sqrt(2 * np.pi) * sigma) * np.exp(-0.5 * (param_value - mu)**2 / sigma**2)
        if prob < 0:
            warnings.warn('Probability less than 0 while checking Gaussian prior! Current parameter name and value: {0}:{1}.'.format(param_name, param_value))
            return np.inf
        else:
            return np.log(prob)

    def exponential_prior(self, param_name, param_value):
        '''
        Check if given param_value is valid according to the prior distribution.
        Returns the log prior probability or np.inf if the param_value is invalid.
        '''
        prior_dict = self.prior
        if prior_dict is None:
            raise ValueError('No prior found')
        lambda_p = prior_dict[param_name][1]
        prob = lambda_p * np.exp(-lambda_p * param_value)
        if prob < 0:
            warnings.warn('Probability less than 0 while checking Exponential prior! Current parameter name and value: {0}:{1}.'.format(param_name, param_value))
            return np.inf
        else:
            return np.log(prob)

    def gamma_prior(self, param_name, param_value):
        '''
        Check if given param_value is valid according to the prior distribution.
        Returns the log prior probability or np.inf if the param_value is invalid.
        '''
        prior_dict = self.prior
        if prior_dict is None:
            raise ValueError('No prior found')
        alpha = prior_dict[param_name][1]
        beta = prior_dict[param_name][2]
        from scipy.special import gamma
        # Gamma pdf: beta^alpha / Gamma(alpha) * x^(alpha-1) * exp(-beta * x)
        prob = (beta**alpha) / gamma(alpha) * param_value**(alpha - 1) * np.exp(-1 * beta * param_value)
        if prob < 0:
            # Fixed warning text: this is the Gamma prior (was copy-pasted as "Exponential").
            warnings.warn('Probability less than 0 while checking Gamma prior! Current parameter name and value: {0}:{1}.'.format(param_name, param_value))
            return np.inf
        else:
            return np.log(prob)

    def beta_prior(self, param_name, param_value):
        '''
        Check if given param_value is valid according to the prior distribution.
        Returns the log prior probability or np.inf if the param_value is invalid.
        '''
        prior_dict = self.prior
        if prior_dict is None:
            raise ValueError('No prior found')
        alpha = prior_dict[param_name][1]
        beta = prior_dict[param_name][2]
        # Fixed import: `import scipy.special.beta as beta_func` is invalid syntax
        # for importing a function (it raises ModuleNotFoundError at runtime).
        from scipy.special import beta as beta_func
        prob = (param_value**(alpha - 1) * (1 - param_value)**(beta - 1)) / beta_func(alpha, beta)
        if prob < 0:
            # Fixed warning text: this is the Beta prior (was copy-pasted as "Exponential").
            warnings.warn('Probability less than 0 while checking Beta prior! Current parameter name and value: {0}:{1}.'.format(param_name, param_value))
            return np.inf
        else:
            return np.log(prob)

    def log_uniform_prior(self, param_name, param_value):
        '''
        Check if given param_value is valid according to the prior distribution.
        Returns the log prior probability or np.inf if the param_value is invalid.
        '''
        prior_dict = self.prior
        if prior_dict is None:
            raise ValueError('No prior found')
        lower_bound = prior_dict[param_name][1]
        upper_bound = prior_dict[param_name][2]
        if lower_bound < 0 or upper_bound < 0:
            raise ValueError('Upper and lower bounds for log-uniform prior must be positive.')
        if param_value > upper_bound or param_value < lower_bound:
            return np.inf
        # Reciprocal (log-uniform) pdf: 1 / (x * log(upper/lower))
        prob = 1 / (param_value * (np.log(upper_bound) - np.log(lower_bound)))
        if prob < 0:
            warnings.warn('Probability less than 0 while checking Log-Uniform prior! Current parameter name and value: {0}:{1}.'.format(param_name, param_value))
            return np.inf
        else:
            return np.log(prob)

    def log_gaussian_prior(self, param_name, param_value):
        '''
        Check if given param_value is valid according to the prior distribution.
        Returns the log prior probability or np.inf if the param_value is invalid.
        '''
        prior_dict = self.prior
        if prior_dict is None:
            raise ValueError('No prior found')
        mu = prior_dict[param_name][1]
        sigma = prior_dict[param_name][2]
        if sigma < 0:
            raise ValueError('The standard deviation must be positive.')
        # Using probability density function for log-normal distribution
        prob = 1 / (param_value * np.sqrt(2 * np.pi) * sigma) * np.exp((-0.5 * (np.log(param_value) - mu)**2) / sigma**2)
        if prob < 0:
            warnings.warn('Probability less than 0 while checking log-normal prior! Current parameter name and value: {0}:{1}.'.format(param_name, param_value))
            return np.inf
        else:
            return np.log(prob)
class StochasticInference(PIDInterface):
    """PID interface that scores parameters against stochastic trajectory data."""

    def __init__(self, params_to_estimate, M, prior):
        # Likelihood object and data wrapper are built lazily by
        # setup_likelihood_function.
        self.LL_stoch = None
        self.dataStoch = None
        super().__init__(params_to_estimate, M, prior)

    def setup_likelihood_function(self, data, timepoints, measurements, initial_conditions, norm_order = 2, N_simulations = 3, debug = False, **kwargs):
        """Wrap the data into StochasticTrajectories and build the STLL object."""
        N = np.shape(data)[0]
        if debug:
            print('Stochastic inference attributes:')
            print('The timepoints shape is {0}'.format(np.shape(timepoints)))
            print('The data shape is {0}'.format(np.shape(data)))
            print('The measurmenets is {0}'.format(measurements))
            print('The N is {0}'.format(N))
            print('Using the initial conditions: {0}'.format(initial_conditions))
        self.dataStoch = StochasticTrajectories(np.array(timepoints), data, measurements, N)
        # If there are multiple initial conditions in a data-set, they should
        # correspond to multiple initial conditions for inference.
        # Note: len(initial_conditions) must equal the number of trajectories N.
        self.LL_stoch = STLL(model = self.M, init_state = initial_conditions,
        data = self.dataStoch, N_simulations = N_simulations, norm_order = norm_order)

    def get_likelihood_function(self, params):
        """Return log prior + log likelihood for the given parameter vector."""
        if self.LL_stoch is None:
            raise RuntimeError("Must call StochasticInference.setup_likelihood_function before using StochasticInference.get_likelihood_function.")
        # Bind parameter values to their names and push them into the model.
        values = dict(zip(self.params_to_estimate, params))
        self.LL_stoch.set_init_params(values)
        # Reject immediately when the prior is non-finite.
        log_prior = self.check_prior(values)
        if not np.isfinite(log_prior):
            return -np.inf
        return log_prior + self.LL_stoch.py_log_likelihood()
class DeterministicInference(PIDInterface):
    """PID interface that scores parameters against deterministic (bulk) data."""

    def __init__(self, params_to_estimate, M, prior):
        # Likelihood object and data wrapper are built lazily by
        # setup_likelihood_function.
        self.LL_det = None
        self.dataDet = None
        super().__init__(params_to_estimate, M, prior)

    def setup_likelihood_function(self, data, timepoints, measurements, initial_conditions, norm_order = 2, debug = False, **kwargs):
        """Wrap the data into a BulkData object and build the DLL likelihood."""
        N = np.shape(data)[0]
        # The timepoints should be a list of timepoint vectors, one per trajectory.
        self.dataDet = BulkData(np.array(timepoints), data, measurements, N)
        # If there are multiple initial conditions in a data-set, they should
        # correspond to multiple initial conditions for inference;
        # len(initial_conditions) must equal the number of trajectories N.
        if debug:
            print('The deterministic inference attributes:')
            print('The timepoints shape is {0}'.format(np.shape(timepoints)))
            print('The data shape is {0}'.format(np.shape(data)))
            print('The measurmenets is {0}'.format(measurements))
            print('The N is {0}'.format(N))
            print('Using the initial conditions: {0}'.format(initial_conditions))
        self.LL_det = DLL(model = self.M, init_state = initial_conditions, data = self.dataDet, norm_order = norm_order)

    def get_likelihood_function(self, params):
        """Return log prior + log likelihood for the given parameter vector."""
        if self.LL_det is None:
            raise RuntimeError("Must call DeterministicInference.setup_likelihood_function before using DeterministicInference.get_likelihood_function.")
        # Hot path: this is the only part called repeatedly by the sampler.
        values = dict(zip(self.params_to_estimate, params))
        self.LL_det.set_init_params(values)
        log_prior = self.check_prior(values)
        if not np.isfinite(log_prior):
            return -np.inf
        return log_prior + self.LL_det.py_log_likelihood()
|
# Demo: bcrypt-hash the literal string 'password1' with a work factor of 8.
# NOTE(review): the return value is discarded — presumably a smoke test for
# flask_bcrypt being importable/usable; confirm.
from flask_bcrypt import generate_password_hash
generate_password_hash('password1', 8)
|
"""
Module for main window related functionality
"""
import PyQt4.QtGui
from herculeum.ui.controllers import EndScreenController, StartGameController
from herculeum.ui.gui.endscreen import EndScreen
from herculeum.ui.gui.eventdisplay import EventMessageDockWidget
from herculeum.ui.gui.map import PlayMapWindow
from herculeum.ui.gui.menu import MenuDialog
from herculeum.ui.gui.startgame import StartGameWidget
from PyQt4.QtCore import QFile, Qt
from PyQt4.QtGui import (QAction, QApplication, QCursor, QDialog, QIcon,
QMainWindow, QPixmap, QSplashScreen)
class QtUserInterface():
    """
    Class for Qt User Interface

    Owns the QApplication instance, the splash screen and the creation of
    the main window.

    .. versionadded:: 0.9
    """
    def __init__(self, application):
        """
        Default constructor

        :param application: the game application object; its attributes
                            (surface_manager, config, generators) are read
                            when the main window is built
        """
        super().__init__()
        self.application = application
        self.splash_screen = None
        # The QApplication must exist before any other Qt objects.
        self.qt_app = QApplication([])
        # self.qt_app.setOverrideCursor(QCursor(Qt.BlankCursor))
    def show_splash_screen(self):
        """
        Show splash screen

        Also loads the application-wide stylesheet from the Qt resource
        ':herculeum.qss' before displaying ':splash.png'.
        """
        file = QFile(':herculeum.qss')
        file.open(QFile.ReadOnly)
        styleSheet = str(file.readAll().data(), 'ascii')
        self.qt_app.setStyleSheet(styleSheet)
        pixmap = QPixmap(':splash.png')
        self.splash_screen = QSplashScreen(pixmap)
        self.splash_screen.show()
    def show_main_window(self):
        """
        Show main window

        Builds the frameless MainWindow, dismisses the splash screen,
        opens the new-game dialog and enters the Qt event loop (blocks
        until the application quits).
        """
        main_window = MainWindow(self.application,
                                 self.application.surface_manager,
                                 self.qt_app,
                                 None,
                                 Qt.FramelessWindowHint,
                                 StartGameController(self.application.level_generator_factory,
                                                     self.application.creature_generator,
                                                     self.application.item_generator,
                                                     self.application.config.start_level))
        self.splash_screen.finish(main_window)
        main_window.show_new_game()
        self.qt_app.exec_()
class MainWindow(QMainWindow):
    """
    Class for displaying main window

    Hosts the play-map widget and wires up menu, inventory and
    end-screen actions.

    .. versionadded:: 0.5
    """
    def __init__(self, application, surface_manager, qt_app, parent, flags,
                 controller):
        """
        Default constructor

        :param application: game application (world, config, factories)
        :param surface_manager: graphics surface manager
        :param qt_app: the running QApplication
        :param parent: parent widget (may be None)
        :param flags: Qt window flags
        :param controller: StartGameController used to set up the world
        """
        super().__init__(parent, flags)
        self.application = application
        self.surface_manager = surface_manager
        self.qt_app = qt_app
        self.controller = controller
        self.__set_layout()
    def __set_layout(self):
        # Build actions, the central map widget and window chrome.
        exit_action = QAction(QIcon(':exit-game.png'),
                              '&Quit',
                              self)
        exit_action.setShortcut('Ctrl+Q')
        exit_action.setStatusTip('Quit game')
        exit_action.triggered.connect(PyQt4.QtGui.qApp.quit)
        inventory_action = QAction(QIcon(':inventory.png'),
                                   'Inventory',
                                   self)
        inventory_action.setShortcut('Ctrl+I')
        inventory_action.setStatusTip('Show inventory')
        inventory_action.triggered.connect(self.__show_menu)
        # NOTE(review): character_action is created but never connected to a
        # slot nor added to a toolbar/menu here — confirm whether it is used.
        character_action = QAction(QIcon(':character.png'),
                                   'Character',
                                   self)
        character_action.setShortcut('Ctrl+C')
        character_action.setStatusTip('Show character')
        self.map_window = PlayMapWindow(parent=None,
                                        model=self.application.world,
                                        surface_manager=self.surface_manager,
                                        action_factory=self.application.action_factory,
                                        rng=self.application.rng,
                                        rules_engine=self.application.rules_engine,
                                        configuration=self.application.config)
        self.setCentralWidget(self.map_window)
        self.map_window.MenuRequested.connect(self.__show_menu)
        self.map_window.EndScreenRequested.connect(self.__show_end_screen)
        self.setGeometry(50, 50, 800, 600)
        self.setWindowTitle('Herculeum')
        self.setWindowIcon(QIcon(':rune-stone.png'))
        self.showMaximized()
    def show_new_game(self):
        """
        Show new game dialog

        On acceptance, sets up the world with the chosen player character
        and switches to the map window.
        """
        app = self.application
        start_dialog = StartGameWidget(generator=app.player_generator,
                                       config=self.application.config.controls,
                                       parent=self,
                                       application=self.application,
                                       surface_manager=self.surface_manager,
                                       flags=Qt.Dialog | Qt.CustomizeWindowHint)
        result = start_dialog.exec_()
        if result == QDialog.Accepted:
            player = start_dialog.player_character
            intro_text = self.controller.setup_world(self.application.world,
                                                     player)
            # Keep the hit-point widget in sync with the player's state.
            player.register_for_updates(self.map_window.hit_points_widget)
            self.map_window.hit_points_widget.show_hit_points(player)
            self.map_window.hit_points_widget.show_spirit_points(player)
            self.map_window.message_widget.text_edit.setText(intro_text)
            self.__show_map_window()
    def __show_map_window(self):
        """
        Show map window
        """
        self.map_window.construct_scene()
    def __show_message_window(self, character):
        """
        Show message display

        :param character: character which events to display
        :type character: Character
        """
        messages_display = EventMessageDockWidget(self, character)
        self.addDockWidget(Qt.BottomDockWidgetArea,
                           messages_display)
    def __show_menu(self):
        """
        Show menu
        """
        menu_dialog = MenuDialog(self.surface_manager,
                                 self.application.world.player,
                                 self.application.action_factory,
                                 self.application.config.controls,
                                 self,
                                 Qt.Dialog | Qt.CustomizeWindowHint)
        menu_dialog.exec_()
    def __show_end_screen(self):
        """
        Show end screen

        Displays the end screen modally and then quits the application.

        .. versionadded:: 0.8
        """
        end_screen = EndScreen(self.application.world,
                               self.application.config.controls,
                               self,
                               Qt.Dialog | Qt.CustomizeWindowHint,
                               controller=EndScreenController())
        end_screen.exec_()
        self.qt_app.quit()
|
from utils.face import Face
import pygame
from utils.message import Message
from utils.alarm import Alarm
class Button(pygame.sprite.Sprite):
    """Circular clickable sprite that fires an optional callback on release."""

    def __init__(self, rect, color=(0,0,255), action=None):
        pygame.sprite.Sprite.__init__(self)
        self.color = color
        self.action = action
        self.rect = pygame.Rect(rect)
        # Draw onto a private surface sized to the button's rect.
        self.baseImage = pygame.Surface((self.rect.width, self.rect.height))
        self.image = self.baseImage

    def update(self):
        """Render the idle state: a 1-pixel circle outline."""
        area = self.baseImage.get_rect()
        pygame.draw.circle(self.baseImage, self.color, area.center, area.width/2, 1)

    def touchDown(self):
        """Render the pressed state: a filled circle."""
        area = self.baseImage.get_rect()
        pygame.draw.circle(self.baseImage, self.color, area.center, area.width/2, 0)

    def touchUp(self):
        """Restore the outline, then invoke the callback when one is set."""
        area = self.baseImage.get_rect()
        self.image.fill(pygame.Color("black"))
        pygame.draw.circle(self.baseImage, self.color, area.center, area.width/2, 1)
        if self.action is not None:
            self.action()

    def setAction(self, action):
        """Install the callable invoked on touchUp."""
        self.action = action
class Line(Face):
    # One row of left-aligned text rendered onto its own surface.
    def __init__(self, rect, color=(0,0,255), text=""):
        # NOTE(review): initialises pygame.sprite.Sprite directly instead of
        # calling Face.__init__ — confirm Face expects subclasses to do this.
        pygame.sprite.Sprite.__init__(self)
        self._alarmList = {}
        self.color = color
        self.rect = pygame.Rect(rect)
        self.text = text
        self.baseImage = pygame.Surface((self.rect.width, self.rect.height))
        self.image = self.baseImage
        # Single-sprite group holding the rendered text message.
        self.faceSprite = pygame.sprite.GroupSingle(Message((self.text,), vector=(0,0), fontsize=45, align="left", padding=0, fgcolor=(0,0,255)))
        # Anchor the text at the vertical centre of the left edge.
        surfaceRect = self.image.get_rect()
        self.faceSprite.sprite.rect.midleft = surfaceRect.midleft
    def update(self):
        # Blit the text sprite onto this line's surface each frame.
        self.faceSprite.draw(self.baseImage)
class AlarmSetting(Face):
    # Face displaying alarm rows plus an add-alarm button, with mouse handling.
    def __init__(self, rect, alarm, color=(0,0,255)):
        # NOTE(review): initialises pygame.sprite.Sprite directly instead of
        # calling Face.__init__ — confirm Face expects subclasses to do this.
        pygame.sprite.Sprite.__init__(self)
        self._alarmList = {}
        # Only accept a real Alarm instance for the backing model.
        if isinstance(alarm, Alarm):
            self._alarmObject = alarm
        else:
            raise Exception("Not an Alarm-class object")
        self.color = color
        self.rect = pygame.Rect(rect)
        self.requestingFace = False
        self.baseImage = pygame.Surface((self.rect.width, self.rect.height))
        self.image = self.baseImage
        self._lines = []
        # Four placeholder rows stacked along the right edge.
        # NOTE(review): `rect.height`/`rect.width` are read off the raw
        # argument, so callers must pass a pygame.Rect (not a bare tuple)
        # — confirm against call sites.
        for i in range(4):
            line = pygame.sprite.GroupSingle(Line(pygame.Rect((0, 0),(rect.height/5*4, rect.height/5)), text="Hello"))
            line.sprite.rect.topright = (rect.width, rect.height/4*i)
            self._lines.append(line)
    def addAlarm(self):
        # Append a round button whose action re-invokes addAlarm.
        # NOTE(review): every added button is placed at the same topright
        # position, so repeated calls overlap — confirm intended.
        line = pygame.sprite.GroupSingle(Button(pygame.Rect((0, 0),(self.rect.height/5, self.rect.height/5))))
        line.sprite.rect.topright = (self.rect.width, self.rect.height/4)
        line.sprite.setAction(self.addAlarm)
        self._lines.append(line)
    def update(self):
        # Update and composite each row onto this face's surface.
        for line in self._lines:
            line.update()
            line.draw(self.baseImage)
    def handleEvent(self, event):
        # Forward mouse press/release to whichever row is under the cursor.
        pos = pygame.mouse.get_pos()
        if event.type == pygame.MOUSEBUTTONDOWN:
            for butt in self._lines:
                if butt.sprite.rect.collidepoint(pos):
                    butt.sprite.touchDown()
        if event.type == pygame.MOUSEBUTTONUP:
            for butt in self._lines:
                if butt.sprite.rect.collidepoint(pos):
                    butt.sprite.touchUp()
|
from django import forms
from django.utils.translation import ugettext_lazy as _
from django.forms import inlineformset_factory
from djangosige.apps.financeiro.models import PlanoContasGrupo, PlanoContasSubgrupo
class PlanoContasGrupoForm(forms.ModelForm):
    # ModelForm for chart-of-accounts groups: exposes the group type and
    # description with Bootstrap-styled widgets and translated labels.
    class Meta:
        model = PlanoContasGrupo
        fields = ('tipo_grupo', 'descricao',)
        widgets = {
            'descricao': forms.TextInput(attrs={'class': 'form-control'}),
            'tipo_grupo': forms.Select(attrs={'class': 'form-control'}),
        }
        labels = {
            'descricao': _('Descrição'),
            'tipo_grupo': _('Tipo de lançamento'),
        }
class PlanoContasSubgrupoForm(forms.ModelForm):
    # ModelForm for chart-of-accounts subgroups: only the description is
    # editable; the parent group comes from the inline formset below.
    class Meta:
        model = PlanoContasSubgrupo
        fields = ('descricao',)
        widgets = {
            'descricao': forms.TextInput(attrs={'class': 'form-control'}),
        }
        labels = {
            'descricao': _('Descrição'),
        }
# Inline formset editing a group's subgroups (FK 'grupo'): one extra blank
# row, rows deletable.
PlanoContasSubgrupoFormSet = inlineformset_factory(
    PlanoContasGrupo, PlanoContasSubgrupo, form=PlanoContasSubgrupoForm, fk_name='grupo', extra=1, can_delete=True)
|
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm, UserChangeForm
from django.contrib.auth.admin import UserAdmin
from django.contrib import admin
from django import forms
class VoterCreationForm(UserCreationForm):
    # Extra form field; the stock django User model has no 'section' column.
    section = forms.CharField()
    def save(self, commit=True):
        """Create the user and attach the submitted section before saving.

        NOTE(review): `user.section` is assigned as a plain attribute — unless
        a custom field or profile handles it elsewhere, user.save() will not
        persist it. Confirm against the model.
        """
        user = super(VoterCreationForm, self).save(commit=False)
        user.section = self.cleaned_data['section']
        if commit:
            user.save()
        return user
    class Meta:
        model = User
        fields = ('username', 'password1', 'password2', 'section', 'first_name', 'last_name', 'is_active', 'is_staff', 'is_superuser')
class VoterChangeForm(UserChangeForm):
    # Extra form field; the stock django User model has no 'section' column.
    section = forms.CharField()
    def save(self, commit=True):
        """Update the user and attach the submitted section before saving.

        NOTE(review): as in VoterCreationForm, `user.section` may not be
        persisted by user.save() — confirm.
        """
        user = super(VoterChangeForm, self).save(commit=False)
        user.section = self.cleaned_data['section']
        if commit:
            user.save()
        return user
    class Meta:
        model = User
        # NOTE(review): excluding the empty-string field name excludes
        # nothing — presumably meant as "exclude no fields"; confirm.
        exclude = ('',)
class VoterAdmin(UserAdmin):
    # UserAdmin variant exposing the extra 'section' field on both the
    # change and add forms, and allowing filtering by it.
    form = VoterChangeForm
    add_form = VoterCreationForm
    list_filter = UserAdmin.list_filter + ('section',)
    fieldsets = (
        (None, {'fields': ('username', 'password')}),
        (('Personal info'), {'fields': ('first_name', 'last_name', 'section')}),
        (('Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser')}),
    )
    add_fieldsets = (
        (None, {
            'classes': ('wide',),
            'fields': ('username', 'password1', 'password2', 'section', 'first_name', 'last_name', 'is_active', 'is_staff', 'is_superuser')}
        ),
    )
# Replace the default User admin with the voter-aware one.
admin.site.unregister(User)
admin.site.register(User, VoterAdmin)
|
import logging
# Verbose logging so the GNTP protocol exchange can be inspected.
logging.basicConfig(level=logging.DEBUG)
from gntp.notifier import GrowlNotifier
import platform
# Connect to the Growl daemon on host 'ayu' with password authentication.
growl = GrowlNotifier(notifications=['Testing'],password='password',hostname='ayu')
# Subscribe this machine (identified by its node name) on port 12345.
growl.subscribe(platform.node(),platform.node(),12345)
|
from django.conf.urls.defaults import *
# URL routes for the member app. String view references and
# django.conf.urls.defaults indicate a pre-1.6 Django code base.
urlpatterns = patterns('member.views',
    url(r'^$', 'login', name='passport_index'),
    url(r'^register/$', 'register', name='passport_register'),
    url(r'^login/$', 'login', name='passport_login'),
    url(r'^logout/$', 'logout', name='passport_logout'),
    url(r'^active/$', 'active', name='passport_active'),
    url(r'^forget/$', 'forget', name='passport_forget'),
    url(r'^profile/$', 'profile', name='passport_profile'),
)
|
"""
The following examples are used to demonstrate how to get/record
analytics
The method signatures are:
Pushbots.get_analytics()
and
Pushbots.record_analytics(platform=None, data=None)
In which you must specify either platform or data.
"""
from pushbots import Pushbots
def example_get_analytics():
    """Get analytics by calling Pushbots.get_analytics()"""
    # Application credentials.
    app_id = 'my_app_id'
    secret = 'my_secret'
    # Create a Pushbots instance and fetch the analytics.
    pushbots = Pushbots(app_id=app_id, secret=secret)
    code, message = pushbots.get_analytics()
    print('Returned code: {0}'.format(code))
    print('Returned message: {0}'.format(message))
def example_record_analytics1():
    """Record analytics by passing platform directly to
    Pushbots.record_analytics()
    """
    # Application credentials.
    app_id = 'my_app_id'
    secret = 'my_secret'
    # Create a Pushbots instance and record an iOS analytics event.
    pushbots = Pushbots(app_id=app_id, secret=secret)
    code, message = pushbots.record_analytics(platform=Pushbots.PLATFORM_IOS)
    print('Returned code: {0}'.format(code))
    print('Returned message: {0}'.format(message))
def example_record_analytics2():
    """Record analytics by passing data defined by you to
    Pushbots.record_analytics()
    """
    # Application credentials.
    app_id = 'my_app_id'
    secret = 'my_secret'
    pushbots = Pushbots(app_id=app_id, secret=secret)
    # Raw payload; '0' is Equivalent to Pushbots.PLATFORM_IOS
    payload = {'platform': '0'}
    code, message = pushbots.record_analytics(data=payload)
    print('Returned code: {0}'.format(code))
    print('Returned message: {0}'.format(message))
|
from __future__ import unicode_literals
import httpretty
import json
import sure
from pyeqs import QuerySet, Filter
from pyeqs.dsl import Term, Sort, ScriptScore
from tests.helpers import homogeneous
@httpretty.activate
def test_create_queryset_with_host_string():
    """
    Create a queryset with a host given as a string

    A plain hostname should default to port 9200.
    """
    # When create a queryset
    t = QuerySet("localhost", index="bar")
    # And I have records
    response = {
        "took": 1,
        "hits": {
            "total": 1,
            "max_score": 1,
            "hits": [
                {
                    "_index": "bar",
                    "_type": "baz",
                    "_id": "1",
                    "_score": 10,
                    "_source": {
                        "foo": "bar"
                    },
                    "sort": [
                        1395687078000
                    ]
                }
            ]
        }
    }
    # Stub the default-port search endpoint with the canned response.
    httpretty.register_uri(httpretty.GET, "http://localhost:9200/bar/_search",
                           body=json.dumps(response),
                           content_type="application/json")
    # When I run a query
    results = t[0:1]
    # Then I see the response.
    len(results).should.equal(1)
@httpretty.activate
def test_create_queryset_with_host_dict():
    """
    Create a queryset with a host given as a dict

    The configured port (8080) must be used instead of the default 9200;
    the default-port endpoint is stubbed with an empty result to prove it.
    """
    # When create a queryset
    connection_info = {"host": "localhost", "port": 8080}
    t = QuerySet(connection_info, index="bar")
    # And I have records
    good_response = {
        "took": 1,
        "hits": {
            "total": 1,
            "max_score": 1,
            "hits": [
                {
                    "_index": "bar",
                    "_type": "baz",
                    "_id": "1",
                    "_score": 10,
                    "_source": {
                        "foo": "bar"
                    },
                    "sort": [
                        1395687078000
                    ]
                }
            ]
        }
    }
    bad_response = {
        "took": 1,
        "hits": {
            "total": 0,
            "max_score": None,
            "hits": []
        }
    }
    # Default port returns nothing; configured port returns the record.
    httpretty.register_uri(httpretty.GET, "http://localhost:9200/bar/_search",
                           body=json.dumps(bad_response),
                           content_type="application/json")
    httpretty.register_uri(httpretty.GET, "http://localhost:8080/bar/_search",
                           body=json.dumps(good_response),
                           content_type="application/json")
    # When I run a query
    results = t[0:1]
    # Then I see the response.
    len(results).should.equal(1)
    results[0]["_source"]["foo"].should.equal("bar")
@httpretty.activate
def test_create_queryset_with_host_list():
    """
    Create a queryset with a host given as a list

    Same expectation as the dict case: the listed host/port (8080) is used,
    not the default 9200.
    """
    # When create a queryset
    connection_info = [{"host": "localhost", "port": 8080}]
    t = QuerySet(connection_info, index="bar")
    # And I have records
    good_response = {
        "took": 1,
        "hits": {
            "total": 1,
            "max_score": 1,
            "hits": [
                {
                    "_index": "bar",
                    "_type": "baz",
                    "_id": "1",
                    "_score": 10,
                    "_source": {
                        "foo": "bar"
                    },
                    "sort": [
                        1395687078000
                    ]
                }
            ]
        }
    }
    bad_response = {
        "took": 1,
        "hits": {
            "total": 0,
            "max_score": None,
            "hits": []
        }
    }
    # Default port returns nothing; configured port returns the record.
    httpretty.register_uri(httpretty.GET, "http://localhost:9200/bar/_search",
                           body=json.dumps(bad_response),
                           content_type="application/json")
    httpretty.register_uri(httpretty.GET, "http://localhost:8080/bar/_search",
                           body=json.dumps(good_response),
                           content_type="application/json")
    # When I run a query
    results = t[0:1]
    # Then I see the response.
    len(results).should.equal(1)
    results[0]["_source"]["foo"].should.equal("bar")
|
from typing import Union, Iterator
from ...symbols import NOUN, PROPN, PRON
from ...errors import Errors
from ...tokens import Doc, Span
def noun_chunks(doclike: Union[Doc, Span]) -> Iterator[Span]:
    """
    Detect base noun phrases from a dependency parse. Works on both Doc and Span.

    Yields (start_token_i, end_token_i_exclusive, np_label) triples for each
    chunk headed by a NOUN/PROPN/PRON token whose dependency is one of the
    labels below, or which is conjoined to such a token.
    """
    # fmt: off
    labels = ["nsubj", "nsubj:pass", "obj", "iobj", "ROOT", "appos", "nmod", "nmod:poss"]
    # fmt: on
    doc = doclike.doc  # Ensure works on both Doc and Span.
    if not doc.has_annotation("DEP"):
        raise ValueError(Errors.E029)
    # Intern the label strings once so the loop compares integers.
    np_deps = [doc.vocab.strings[label] for label in labels]
    conj = doc.vocab.strings.add("conj")
    np_label = doc.vocab.strings.add("NP")
    prev_end = -1
    for i, word in enumerate(doclike):
        # Only nominal heads can start a chunk.
        if word.pos not in (NOUN, PROPN, PRON):
            continue
        # Prevent nested chunks from being produced
        if word.left_edge.i <= prev_end:
            continue
        if word.dep in np_deps:
            prev_end = word.right_edge.i
            yield word.left_edge.i, word.right_edge.i + 1, np_label
        elif word.dep == conj:
            # Walk up the conjunction chain to the leftmost coordinated head.
            head = word.head
            while head.dep == conj and head.head.i < head.i:
                head = head.head
            # If the head is an NP, and we're coordinated to it, we're an NP
            if head.dep in np_deps:
                prev_end = word.right_edge.i
                yield word.left_edge.i, word.right_edge.i + 1, np_label
# Hook consumed by the language class to expose Doc.noun_chunks.
SYNTAX_ITERATORS = {"noun_chunks": noun_chunks}
|
from random import randint, seed, choice, random
from numpy import zeros, uint8, cumsum, floor, ceil
from math import sqrt, log
from collections import namedtuple
from PIL import Image
from logging import info, getLogger
class Tree:
    """Binary tree node used by the BSP splitter; `leaf` holds a Container."""

    def __init__(self, leaf):
        self.leaf = leaf
        self.lchild = None
        self.rchild = None

    def get_leafs(self):
        """Return the payloads of all leaf nodes, left to right."""
        if self.lchild is None and self.rchild is None:
            return [self.leaf]
        else:
            return self.lchild.get_leafs() + self.rchild.get_leafs()

    def get_level(self, level, queue):
        """Collect the nodes found *level* levels below this node into *queue*.

        Pass queue=None to start with a fresh list; the populated list is
        returned.
        """
        if queue is None:
            queue = []
        if level == 1:
            # Bug fix: Python lists have no .push(); the original raised
            # AttributeError as soon as a node at the requested level was hit.
            queue.append(self)
        else:
            if self.lchild is not None:
                self.lchild.get_level(level - 1, queue)
            if self.rchild is not None:
                self.rchild.get_level(level - 1, queue)
        return queue

    def paint(self, c):
        """Recursively paint this node's container and all descendants on canvas c."""
        self.leaf.paint(c)
        if self.lchild is not None:
            self.lchild.paint(c)
        if self.rchild is not None:
            self.rchild.paint(c)
class Container():
    """Axis-aligned rectangle used as a BSP partition of the map.

    Reads the module globals MAP_WIDTH/MAP_HEIGHT to pre-compute the
    distance from the map centre.
    """

    def __init__(self, x, y, w, h):
        self.x = x
        self.y = y
        self.w = w
        self.h = h
        cx = self.x + int(self.w / 2)
        cy = self.y + int(self.h / 2)
        self.center = (cx, cy)
        dx = cx - MAP_WIDTH / 2
        dy = cy - MAP_HEIGHT / 2
        self.distance_from_center = sqrt(dx ** 2 + dy ** 2)

    def paint(self, c):
        """Outline this container on canvas *c*."""
        c.stroke_rectangle(self.x, self.y, self.w, self.h)

    def draw_path(self, c, container):
        """Draw a path on *c* between this container's centre and another's."""
        c.path(self.center[0], self.center[1],
               container.center[0], container.center[1])
class Canvas:
    """Raster painting surface backed by a (h, w) uint8 numpy board.

    The board is row-major: every method indexes it as board[y, x].
    """
    # Cell values written by the drawing primitives.
    brushes = {"empty": 0, "hallway": 1, "room": 2}

    def __init__(self, w, h, color="empty"):
        self.board = zeros((h, w), dtype=uint8)
        self.w = w
        self.h = h
        self.set_brush(color)

    def set_brush(self, code):
        """Select the brush value written by subsequent drawing calls."""
        self.color = self.brushes[code]

    def stroke_rectangle(self, x, y, w, h):
        """Draw the 1-cell-thick outline of a w x h rectangle at (x, y)."""
        self.line(x, y, w, True)
        self.line(x, y + h - 1, w, True)
        self.line(x, y, h, False)
        self.line(x + w - 1, y, h, False)

    def filled_rectangle(self, x, y, w, h):
        """Fill a solid w x h rectangle at (x, y)."""
        self.board[y:y + h, x:x + w] = self.color

    def line(self, x, y, length, horizontal):
        """Draw a 1-cell line from (x, y), horizontal or vertical."""
        if horizontal:
            self.board[y, x:x + length] = self.color
        else:
            self.board[y:y + length, x] = self.color

    def path(self, x1, y1, x2, y2):
        """Fill the axis-aligned rectangle spanned by the two endpoints.

        Bug fix: the original sliced board[y1:y2+1, x1:x2+1] directly, which
        silently drew nothing whenever x1 > x2 or y1 > y2.
        """
        xa, xb = sorted((x1, x2))
        ya, yb = sorted((y1, y2))
        self.board[ya:yb + 1, xa:xb + 1] = self.color

    def circle(self, x, y, r):
        """Fill a disc of radius *r* centred at (x, y).

        Bug fix: the original indexed board[x.., y..]; the board is (h, w)
        and every other method indexes [y, x], so non-square maps painted
        the disc transposed (and could index out of bounds).
        """
        for x_offset in range(-r, r + 1):
            for y_offset in range(-r, r + 1):
                if sqrt(x_offset ** 2 + y_offset ** 2) < r:
                    self.board[y + y_offset, x + x_offset] = self.color

    def draw(self):
        """Save the board as an image named by the module global MAP_NAME."""
        im = Image.fromarray(self.board)
        im.save(MAP_NAME)

    def __str__(self):
        return str(self.board)
class Room:
    """Rectangular room carved at a random inset inside a BSP container.

    The environment index hardens and the biome roll skews with the
    container's distance from the map centre (module global MAP_WIDTH).
    """
    environments = ["serene", "calm", "wild", "dangerous", "evil"]
    biomes = ["rock", "rugged", "sand", "mossy", "muddy", "flooded", "gelid", "gloomy", "magma"]
    # NOTE(review): the CDF has 12 entries but `biomes` only lists 9 names,
    # so a high roll yields an index past the name list — confirm the table.
    biomes_CDF = cumsum([0.22,0.14,0.12,0.10,0.10,0.07,0.06,0.06,0.04,0.03,0.03,0.03])

    def __init__(self, container):
        # Random inset from the container's top-left corner; requires the
        # container to be at least 3 cells in each dimension for randint.
        self.x = container.x+randint(1, floor(container.w/3))
        self.y = container.y+randint(1, floor(container.h/3))
        self.w = container.w-(self.x-container.x)
        self.h = container.h-(self.y-container.y)
        # Shrink each side by up to a third of its own length.
        # Bug fix: the height previously shrank by a fraction of the WIDTH
        # (floor(self.w/3)) — a copy-paste error.
        self.w -= randint(0, floor(self.w/3))
        self.h -= randint(0, floor(self.h/3))
        # Environment index 0..4, biased outward plus +/-1 noise.
        self.environment = int(min(4,10*(container.distance_from_center/MAP_WIDTH)+random()*2-1))
        # Biome sampled from the CDF; the roll skews upward with distance.
        roll = random()*0.9+(2*container.distance_from_center/MAP_WIDTH)*0.1
        self.biome = next(n for n,b in enumerate(self.biomes_CDF) if roll<b)

    def paint(self,c):
        """Fill this room's rectangle on canvas *c*."""
        c.filled_rectangle(self.x, self.y,self.w, self.h)
def random_split(container):
    """Split *container* into two sub-containers along a random axis.

    Returns a [first, second] pair, or None when the container is too small
    to split in either direction. Minimum sizes are driven by the module
    globals MIN_ROOM_SIDE, W_RATIO and H_RATIO.
    """
    if container.w < MIN_ROOM_SIDE and container.h < MIN_ROOM_SIDE:
        return None

    def _vertical(cont):
        # Minimum width keeps the aspect ratio bounded by W_RATIO.
        least = int(W_RATIO * cont.h) + 1
        if cont.w < 2 * least:
            return None
        left = Container(cont.x, cont.y, randint(least, cont.w - least), cont.h)
        right = Container(cont.x + left.w, cont.y, cont.w - left.w, cont.h)
        return [left, right]

    def _horizontal(cont):
        # Minimum height keeps the aspect ratio bounded by H_RATIO.
        least = int(H_RATIO * cont.w) + 1
        if cont.h < 2 * least:
            return None
        top = Container(cont.x, cont.y, cont.w, randint(least, cont.h - least))
        bottom = Container(cont.x, cont.y + top.h, cont.w, cont.h - top.h)
        return [top, bottom]

    # Pick an orientation at random; fall back to the other when the first
    # cannot satisfy its minimum-size constraint.
    if randint(0, 1) == 0:
        halves = _vertical(container)
        return halves if halves is not None else _horizontal(container)
    halves = _horizontal(container)
    return halves if halves is not None else _vertical(container)
def split_container(container, iter):
    """Recursively split *container* into a BSP Tree, at most *iter* levels deep.

    Recursion also stops early whenever random_split cannot split further.
    """
    node = Tree(container)
    if iter != 0:
        halves = random_split(container)
        if halves is not None:
            node.lchild = split_container(halves[0], iter - 1)
            node.rchild = split_container(halves[1], iter - 1)
    return node
def draw_paths(c, tree):
    """Recursively draw hallways connecting sibling containers' centres on *c*."""
    if tree.lchild is None or tree.rchild is None:
        return
    tree.lchild.leaf.draw_path(c, tree.rchild.leaf)
    draw_paths(c, tree.lchild)
    draw_paths(c, tree.rchild)
# Module-level map-generation parameters; placeholders until init() fills
# them in from the player count.
MAP_WIDTH = 0
MAP_HEIGHT = 0
N_ITERATIONS = 0
H_RATIO = 0
W_RATIO = 0
MIN_ROOM_SIDE = 0
CENTER_HUB_HOLE = 0
CENTER_HUB_RADIO = 0
MAP_NAME = 0
def init(num_players):
    """Derive all module-level map-generation parameters from the player count."""
    global MAP_WIDTH,MAP_HEIGHT,N_ITERATIONS,H_RATIO,W_RATIO,MIN_ROOM_SIDE,CENTER_HUB_HOLE,CENTER_HUB_RADIO,MAP_NAME
    # Square map whose side scales with the square root of the player count.
    MAP_WIDTH=int(500*sqrt(num_players))
    MAP_HEIGHT=MAP_WIDTH
    # NOTE(review): log() returns a float, so split_container's `iter != 0`
    # countdown never reaches exactly 0; recursion actually terminates when
    # random_split returns None. Confirm whether int() was intended here.
    N_ITERATIONS=log(MAP_WIDTH*100,2)
    H_RATIO=0.49
    W_RATIO=H_RATIO
    MIN_ROOM_SIDE = 32
    CENTER_HUB_HOLE = 32
    CENTER_HUB_RADIO = CENTER_HUB_HOLE-MIN_ROOM_SIDE/2
    MAP_NAME="result%s.png"%MAP_WIDTH
def main(num_players, seed_number):
    """Generate a dungeon map for *num_players* and return (rooms, board).

    The board is the Canvas's numpy array; generation is deterministic for a
    given (num_players, seed_number) pair.
    """
    logger = getLogger('BSPTree')
    logger.info("Initialising")
    init(num_players)
    seed(seed_number)
    canvas = Canvas(MAP_WIDTH, MAP_HEIGHT)
    canvas.set_brush("empty")
    canvas.filled_rectangle(0,0,MAP_WIDTH,MAP_HEIGHT)
    logger.info("Generating container tree")
    # -1 on the main container to remove borders to avoid opened border rooms
    main_container = Container(0, 0, MAP_WIDTH-1, MAP_HEIGHT-1)
    container_tree = split_container(main_container, N_ITERATIONS)
    logger.info("Generating hallways")
    canvas.set_brush("hallway")
    draw_paths(canvas, container_tree)
    logger.info("Generating rooms")
    canvas.set_brush("room")
    leafs = container_tree.get_leafs()
    rooms = []
    # Keep rooms inside the playable ring: outside the central hub hole but
    # within half the map width of the centre.
    for i in range(0, len(leafs)):
        if CENTER_HUB_HOLE < leafs[i].distance_from_center < MAP_WIDTH/2:
            rooms.append(Room(leafs[i]))
            rooms[-1].paint(canvas)
    logger.info("Generating hub")
    canvas.circle(int(MAP_WIDTH/2),int(MAP_HEIGHT/2),int(CENTER_HUB_RADIO))
    #canvas.draw()
    return (rooms, canvas.board)
|
import unittest
import numpy
import chainer
from chainer.backends import cuda
from chainer import functions
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr
def _uniform(*shape):
return numpy.random.uniform(-1, 1, shape).astype(numpy.float32)
@testing.parameterize(*testing.product({
    'in_shapes': [((2,), (4,)), ((2, 1), (4, 2))],
    'out_size': [3],
    'batch_size': [2]
}))
class TestBilinearFunction(unittest.TestCase):
    """Forward, backward and double-backward tests for functions.bilinear."""

    def setUp(self):
        # e1/e2 keep their (possibly multi-dimensional) per-sample shapes;
        # the parameter matrices are sized by the flattened lengths.
        e1_shape = (self.batch_size,) + self.in_shapes[0]
        e2_shape = (self.batch_size,) + self.in_shapes[1]
        e1_size = numpy.prod(self.in_shapes[0])
        e2_size = numpy.prod(self.in_shapes[1])
        self.e1 = _uniform(*e1_shape)
        self.e2 = _uniform(*e2_shape)
        self.W = _uniform(e1_size, e2_size, self.out_size)
        self.V1 = _uniform(e1_size, self.out_size)
        self.V2 = _uniform(e2_size, self.out_size)
        self.b = _uniform(self.out_size)
        self.gy = _uniform(self.batch_size, self.out_size)
        # Direction vectors for the double-backward (second-order) checks.
        self.gge1 = _uniform(*self.e1.shape)
        self.gge2 = _uniform(*self.e2.shape)
        self.ggW = _uniform(*self.W.shape)
        self.ggV1 = _uniform(*self.V1.shape)
        self.ggV2 = _uniform(*self.V2.shape)
        self.ggb = _uniform(*self.b.shape)
        self.check_backward_options = {
            'atol': 1e-5, 'rtol': 1e-4, 'dtype': numpy.float64}
        self.check_double_backward_options = {
            'atol': 1e-4, 'rtol': 1e-3, 'dtype': numpy.float64}

    def check_forward(self, e1_data, e2_data, W_data, V1_data, V2_data,
                      b_data):
        """Compare functions.bilinear against an einsum reference.

        Either all of V1/V2/b are given (full form) or none (partial form);
        a mixed subset raises ValueError.
        """
        e1 = chainer.Variable(e1_data)
        e2 = chainer.Variable(e2_data)
        W = chainer.Variable(W_data)
        # The reference computation operates on flattened per-sample vectors.
        e1_data = e1_data.reshape(e1_data.shape[0], -1)
        e2_data = e2_data.reshape(e2_data.shape[0], -1)
        xp = cuda.get_array_module(e1)
        y_expect = xp.einsum('ij,ik,jkl->il', e1_data, e2_data, W_data)
        flags = V1_data is None, V2_data is None, b_data is None
        if any(flags):
            if not all(flags):
                raise ValueError(
                    'Test either all or none of the optional parameters.')
            y = functions.bilinear(e1, e2, W)
        else:
            V1 = chainer.Variable(V1_data)
            V2 = chainer.Variable(V2_data)
            b = chainer.Variable(b_data)
            y = functions.bilinear(e1, e2, W, V1, V2, b)
            # The original recomputed the einsum here; the value above is
            # identical, so just add the linear and bias terms.
            y_expect += e1_data.dot(V1_data)
            y_expect += e2_data.dot(V2_data)
            y_expect += b_data
        testing.assert_allclose(y_expect, cuda.to_cpu(y.data))
        assert y.data.dtype == e1_data.dtype

    def test_forward_cpu(self):
        self.check_forward(self.e1, self.e2, self.W, self.V1, self.V2, self.b)

    @attr.gpu
    def test_forward_gpu(self):
        self.check_forward(
            cuda.to_gpu(self.e1), cuda.to_gpu(self.e2), cuda.to_gpu(self.W),
            cuda.to_gpu(self.V1), cuda.to_gpu(self.V2), cuda.to_gpu(self.b))

    def test_partial_backward_cpu(self):
        gradient_check.check_backward(
            functions.bilinear, (self.e1, self.e2, self.W), self.gy,
            **self.check_backward_options)

    @attr.gpu
    def test_partial_backward_gpu(self):
        gradient_check.check_backward(
            functions.bilinear,
            (cuda.to_gpu(self.e1), cuda.to_gpu(self.e2), cuda.to_gpu(self.W)),
            cuda.to_gpu(self.gy), **self.check_backward_options)

    def test_full_backward_cpu(self):
        gradient_check.check_backward(
            functions.bilinear,
            (self.e1, self.e2, self.W, self.V1, self.V2, self.b), self.gy,
            **self.check_backward_options)

    @attr.gpu
    def test_full_backward_gpu(self):
        gradient_check.check_backward(
            functions.bilinear,
            (cuda.to_gpu(self.e1), cuda.to_gpu(self.e2), cuda.to_gpu(self.W),
             cuda.to_gpu(self.V1), cuda.to_gpu(self.V2), cuda.to_gpu(self.b)),
            cuda.to_gpu(self.gy), **self.check_backward_options)

    def test_partial_double_backward_cpu(self):
        gradient_check.check_double_backward(
            functions.bilinear, (self.e1, self.e2, self.W), self.gy,
            (self.gge1, self.gge2, self.ggW), **self.check_backward_options)

    @attr.gpu
    def test_partial_double_backward_gpu(self):
        gradient_check.check_double_backward(
            functions.bilinear,
            (cuda.to_gpu(self.e1), cuda.to_gpu(self.e2), cuda.to_gpu(self.W)),
            cuda.to_gpu(self.gy),
            (cuda.to_gpu(self.gge1), cuda.to_gpu(self.gge2),
             cuda.to_gpu(self.ggW)), **self.check_backward_options)

    def test_full_double_backward_cpu(self):
        def f(*inputs):
            y = functions.bilinear(*inputs)
            return y * y
        gradient_check.check_double_backward(
            f, (self.e1, self.e2, self.W, self.V1, self.V2, self.b),
            self.gy,
            (self.gge1, self.gge2, self.ggW, self.ggV1, self.ggV2, self.ggb),
            **self.check_double_backward_options)

    @attr.gpu
    def test_full_double_backward_gpu(self):
        def f(*inputs):
            y = functions.bilinear(*inputs)
            return y * y
        # Fix: the x_grad_grad tuple previously passed self.V1/self.V2
        # instead of the second-order directions self.ggV1/self.ggV2,
        # so the GPU test did not match its CPU counterpart.
        gradient_check.check_double_backward(
            f,
            (cuda.to_gpu(self.e1), cuda.to_gpu(self.e2), cuda.to_gpu(self.W),
             cuda.to_gpu(self.V1), cuda.to_gpu(self.V2), cuda.to_gpu(self.b)),
            cuda.to_gpu(self.gy),
            (cuda.to_gpu(self.gge1), cuda.to_gpu(self.gge2),
             cuda.to_gpu(self.ggW), cuda.to_gpu(self.ggV1),
             cuda.to_gpu(self.ggV2), cuda.to_gpu(self.ggb)),
            **self.check_double_backward_options)
@attr.slow
class TestBilinearFunctionLarge(unittest.TestCase):
    """Smoke test: bilinear on large (256-sized) inputs must run cleanly."""

    def setUp(self):
        size = 256
        self.e1 = _uniform(size, size)
        self.e2 = _uniform(size, size)
        self.w = _uniform(size, size, size)
        self.v1 = _uniform(size, size)
        self.v2 = _uniform(size, size)
        self.b = _uniform(size)

    def test_cpu(self):
        args = (self.e1, self.e2, self.w, self.v1, self.v2, self.b)
        chainer.functions.bilinear(*args)

    @attr.gpu
    def test_gpu(self):
        args = (self.e1, self.e2, self.w, self.v1, self.v2, self.b)
        chainer.functions.bilinear(*[cuda.to_gpu(a) for a in args])
class TestBilinearFunctionInvalidArgument(unittest.TestCase):
    """bilinear must reject a partial set of its optional parameters."""

    def setUp(self):
        self.e1 = chainer.Variable(_uniform(3, 2))
        self.e2 = chainer.Variable(_uniform(3, 4))
        self.W = chainer.Variable(_uniform(2, 4, 5))
        self.V1 = chainer.Variable(_uniform(2, 5))

    def test_invalid_full_partial_ambiguous(self):
        # Supplying V1 without V2 and b is ambiguous and must raise.
        with self.assertRaises(ValueError):
            functions.bilinear(self.e1, self.e2, self.W, self.V1)
testing.run_module(__name__, __file__)
|
from modelmapper.declarations import Mapper, Field
from modelmapper.qt.fields import QLineEditAccessor
class String(QLineEditAccessor):
    """Accessor mapping a QLineEdit widget to a plain string value."""

    def get_value(self):
        """Return the widget's current text as a str."""
        text = self.widget.text()
        return str(text)

    def set_value(self, value):
        """Render any value as text inside the widget."""
        self.widget.setText(str(value))
class Integer(QLineEditAccessor):
    """Accessor mapping a QLineEdit widget to an integer value."""

    def get_value(self):
        """Parse the widget text as an int (ValueError on bad input)."""
        return int(self.widget.text())

    def set_value(self, value):
        # Fix: QLineEdit.setText requires a string; the original passed
        # the bare int, which raises TypeError under PyQt. Convert through
        # int() first so non-numeric input still fails loudly, then render.
        self.widget.setText(str(int(value)))
def get_child_x_mapper(x):
    """Build the one-entry mapper dict linking `x` to its value accessor."""
    link_key = '{}_link'.format(x)
    return {link_key: (x, 'val_{}'.format(x))}
def get_d_mapper():
    """Declare the `d` mapping: two nested Mappers plus one plain Field."""
    mapper = {}
    mapper['expediente_link'] = Mapper('c[0]', 'val_c[0]', get_child_x_mapper('a'))
    mapper['masa_bruta_link'] = Mapper('c[1]', 'val_c[1]', get_child_x_mapper('b'))
    mapper['nombre_link'] = Field('cc', 'val_cc')
    return mapper
def get_model_mapper():
    """Declare the model mapping: each field paired with its Qt accessor."""
    mapper = {}
    mapper['expediente_link'] = Field('expediente', String('expediente'))
    mapper['masa_bruta_link'] = Field('masa_bruta', Integer('masa_bruta'))
    mapper['nombre_link'] = Field('nombre', String('nombre'))
    return mapper
|
__author__ = 'bptripp'
import numpy as np
from scipy.optimize import curve_fit
import matplotlib.pyplot as plt
from sklearn.cluster import KMeans
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis
from keras.models import Sequential
from keras.layers.core import Dense, Activation, Flatten
from keras.layers.convolutional import Convolution2D, MaxPooling2D
"""
Initialization of CNNs via clustering of inputs and convex optimization
of outputs.
"""
def sigmoid(x, centre, gain):
    """Logistic function centred at `centre` with slope `gain`."""
    return 1 / (1 + np.exp(-gain * (x - centre)))
def gaussian(x, mu, sigma):
    """Unnormalised Gaussian bump exp(-(x-mu)^2 / (2*sigma^2))."""
    num = np.power(x - mu, 2.)
    den = 2 * np.power(sigma, 2.)
    return np.exp(-num / den)
def get_sigmoid_params(false_samples, true_samples, do_plot=False):
    """
    Find gain and bias for sigmoid function that approximates probability
    of class memberships. Probability based on Bayes' rule & gaussian
    model of samples from each class.
    """
    false_mu, false_sigma = np.mean(false_samples), np.std(false_samples)
    true_mu, true_sigma = np.mean(true_samples), np.std(true_samples)
    # Evaluation grid spanning the union of both sample ranges (25 steps).
    lowest = np.minimum(np.min(false_samples), np.min(true_samples))
    highest = np.maximum(np.max(false_samples), np.max(true_samples))
    a = np.arange(lowest, highest, (highest - lowest) / 25)
    # Bayes with equal priors: P(true|x) = p(x|true) / (p(x|true)+p(x|false)).
    p_x_false = gaussian(a, false_mu, false_sigma)
    p_x_true = gaussian(a, true_mu, true_sigma)
    p_true = p_x_true / (p_x_true + p_x_false)
    # Fit the logistic curve to the posterior probabilities.
    popt, _ = curve_fit(sigmoid, a, p_true)
    centre, gain = popt[0], popt[1]
    if do_plot:
        plt.hist(false_samples, a)
        plt.hist(true_samples, a)
        plt.plot(a, 100*sigmoid(a, centre, gain))
        plt.plot(a, 100*p_true)
        plt.title('centre: ' + str(centre) + ' gain: ' + str(gain))
        plt.show()
    return centre, gain
def check_sigmoid():
    """Visual sanity check of get_sigmoid_params on two synthetic classes."""
    n = 1000
    negatives = 1 + .3 * np.random.randn(n)
    positives = -1 + 1 * np.random.randn(n)
    centre, gain = get_sigmoid_params(negatives, positives, do_plot=True)
def get_convolutional_prototypes(samples, shape, patches_per_sample=5):
    """Initialise conv kernels by k-means clustering of random input patches.

    :param samples: 4D array (n, channels, height, width) of example inputs
    :param shape: target kernel tensor shape (n_kernels, channels, kh, kw)
    :param patches_per_sample: number of random patches drawn per sample
    :returns: kernel tensor of the requested shape, each kernel unit-norm
    """
    assert len(samples.shape) == 4
    assert len(shape) == 4
    # Range of valid top-left corners for a patch inside a sample.
    wiggle = (samples.shape[2]-shape[2], samples.shape[3]-shape[3])
    patches = []
    for sample in samples:
        for i in range(patches_per_sample):
            # Fix: +1 so the far edge is a valid corner and randint does not
            # raise when the patch exactly spans the sample (wiggle == 0).
            corner = (np.random.randint(0, wiggle[0] + 1),
                      np.random.randint(0, wiggle[1] + 1))
            patches.append(sample[:, corner[0]:corner[0]+shape[2],
                                  corner[1]:corner[1]+shape[3]])
    patches = np.array(patches)
    flat = np.reshape(patches, (patches.shape[0], -1))
    km = KMeans(shape[0])
    km.fit(flat)
    kernels = km.cluster_centers_
    # Scale each cluster centre to unit L2 norm so kernels start balanced.
    for i in range(kernels.shape[0]):
        kernels[i, :] = kernels[i, :] / np.linalg.norm(kernels[i, :])
    return np.reshape(kernels, shape)
def get_dense_prototypes(samples, n):
    """Return `n` k-means cluster centres of the given sample vectors."""
    clusterer = KMeans(n)
    clusterer.fit(samples)
    return clusterer.cluster_centers_
def check_get_prototypes():
    """Smoke-check prototype extraction output shapes on random data."""
    conv_inputs = np.random.rand(1000, 2, 28, 28)
    conv_protos = get_convolutional_prototypes(conv_inputs, (20, 2, 5, 5))
    print(conv_protos.shape)
    dense_inputs = np.random.rand(900, 2592)
    dense_protos = get_dense_prototypes(dense_inputs, 64)
    print(dense_protos.shape)
def get_discriminant(samples, labels):
    """Fit shrinkage LDA and return its discriminant weight vector."""
    model = LinearDiscriminantAnalysis(solver='eigen', shrinkage='auto')
    model.fit(samples, labels)
    return model.coef_[0]
def check_discriminant():
    """Visualise LDA separation of two synthetic 2-D Gaussian clusters."""
    n = 1000
    labels = np.random.rand(n) < 0.5
    samples = np.zeros((n, 2))
    # Two clusters with different means and spreads.
    for idx in range(len(labels)):
        if labels[idx] > 0.5:
            samples[idx, :] = np.array([0, 1]) + 1 * np.random.randn(1, 2)
        else:
            samples[idx, :] = np.array([-2, -1]) + .5 * np.random.randn(1, 2)
    coeff = get_discriminant(samples, labels)
    plt.figure(figsize=(10, 5))
    plt.subplot(1, 2, 1)
    plt.scatter(samples[labels > .5, 0], samples[labels > .5, 1], color='g')
    plt.scatter(samples[labels < .5, 0], samples[labels < .5, 1], color='r')
    plt.plot([-coeff[0], coeff[0]], [-coeff[1], coeff[1]], color='k')
    plt.subplot(1, 2, 2)
    # Projections onto the discriminant feed the sigmoid fit.
    get_sigmoid_params(np.dot(samples[labels < .5], coeff),
                       np.dot(samples[labels > .5], coeff),
                       do_plot=True)
    plt.show()
def init_model(model, X_train, Y_train):
    """Initialise `model` weights from data instead of random values.

    Hidden Convolution2D/Dense layers get k-means prototype weights derived
    from their actual inputs; the final Dense+sigmoid pair is set from an
    LDA discriminant so the output approximates class probability.

    :param model: a Sequential network ending with Dense -> Activation('sigmoid')
    :param X_train: training inputs
    :param Y_train: binary training labels (compared against .5)
    :raises Exception: when the network does not end with Dense + sigmoid
    """
    if not (isinstance(model.layers[-1], Activation) \
            and model.layers[-1].activation.__name__ == 'sigmoid'\
            and isinstance(model.layers[-2], Dense)):
        raise Exception('This does not look like an LDA-compatible network, which is all we support')
    # Initialise every hidden layer from the activations that feed it.
    for i in range(len(model.layers)-2):
        if isinstance(model.layers[i], Convolution2D):
            inputs = get_inputs(model, X_train, i)
            w, b = model.layers[i].get_weights()
            w = get_convolutional_prototypes(inputs, w.shape)
            b = .1 * np.ones_like(b)  # small positive bias keeps ReLUs alive
            model.layers[i].set_weights([w, b])
        if isinstance(model.layers[i], Dense):
            inputs = get_inputs(model, X_train, i)
            w, b = model.layers[i].get_weights()
            w = get_dense_prototypes(inputs, w.shape[1]).T
            b = .1 * np.ones_like(b)
            model.layers[i].set_weights([w, b])
    # Output layer: LDA direction scaled by the fitted sigmoid gain.
    inputs = get_inputs(model, X_train, len(model.layers)-3)
    coeff = get_discriminant(inputs, Y_train)
    centre, gain = get_sigmoid_params(np.dot(inputs[Y_train < .5], coeff),
                                      np.dot(inputs[Y_train > .5], coeff))
    w = coeff * gain
    w = w[:, np.newaxis]
    b = np.array([-centre])
    model.layers[-2].set_weights([w, b])
    # Diagnostic plot: pre-sigmoid activation distribution per class.
    # (Removed an unused local `bins` that was computed here but never used.)
    sigmoid_inputs = get_inputs(model, X_train, len(model.layers)-1)
    plt.figure()
    plt.subplot(2, 1, 1)
    plt.hist(sigmoid_inputs[Y_train < .5])
    plt.subplot(2, 1, 2)
    plt.hist(sigmoid_inputs[Y_train > .5])
    plt.show()
def get_inputs(model, X_train, layer):
    """Activations feeding `layer`: X_train itself for layer 0, otherwise
    the predictions of a truncated copy of `model` up to (excluding) it."""
    if layer == 0:
        return X_train
    head = Sequential(layers=model.layers[:layer])
    head.compile('sgd', 'mse')
    return head.predict(X_train)
if __name__ == '__main__':
    # check_sigmoid()
    # check_get_prototypes()
    # check_discriminant()
    # Fix: `cPickle` and the `file` builtin are Python-2-only; the stdlib
    # `pickle` + `open` combination behaves identically on both versions.
    # NOTE(review): Py2-era pickles containing numpy arrays may need
    # pickle.load(f, encoding='latin1') under Python 3 — confirm with data.
    import pickle
    # f = open('../data/depths/24_bowl-29-Feb-2016-15-01-53.pkl', 'rb')
    with open('../data/bowl-test.pkl', 'rb') as f:
        d, bd, l = pickle.load(f)
    # Standardise depth images to zero mean and unit variance.
    d = d - np.mean(d.flatten())
    d = d / np.std(d.flatten())
    # n = 900
    n = 90
    X_train = np.zeros((n, 1, 80, 80))
    X_train[:, 0, :, :] = d[:n, :, :]
    Y_train = l[:n]
    model = Sequential()
    model.add(Convolution2D(64, 9, 9, input_shape=(1, 80, 80)))
    model.add(Activation('relu'))
    model.add(MaxPooling2D())
    # model.add(Convolution2D(64,3,3))
    # model.add(Activation('relu'))
    # model.add(MaxPooling2D())
    model.add(Flatten())
    model.add(Dense(64))
    model.add(Activation('relu'))
    model.add(Dense(1))
    model.add(Activation('sigmoid'))
    init_model(model, X_train, Y_train)
    # from visualize import plot_kernels
    # plot_kernels(model.layers[0].get_weights()[0])
|
# Token names recognised by the lexer (PLY requires this `tokens` list).
tokens = [
    'LPAREN',
    'RPAREN',
    'LBRACE',
    'RBRACE',
    'EQUAL',
    'DOUBLE_EQUAL',
    'NUMBER',
    'COMMA',
    'VAR_DEFINITION',
    'IF',
    'ELSE',
    'END',
    'ID',
    'PRINT'
]
# Simple single-pattern tokens. PLY tries string rules in order of
# decreasing regex length, so DOUBLE_EQUAL wins over EQUAL on "==".
t_LPAREN = r"\("
t_RPAREN = r"\)"
t_LBRACE = r"\{"
t_RBRACE = r"\}"
t_EQUAL = r"\="
t_DOUBLE_EQUAL = r"\=\="
def t_NUMBER(token):
    # PLY convention: the docstring below IS the token's regex.
    r"[0-9]+"
    # Convert the lexeme so the parser receives numeric values directly.
    token.value = int(token.value)
    return token
t_COMMA = r","
# Keyword tokens. PLY matches function rules in definition order, so these
# specific phrases are tried before the generic ID rule at the bottom.
def t_VAR_DEFINITION(token):
    r",\sFirst\sof\s(his|her)\sName"
    return token
def t_IF(token):
    r"I\spromise"
    return token
def t_ELSE(token):
    r"Mayhaps"
    return token
def t_PRINT(token):
    r"Hodor"
    return token
def t_END(token):
    r"And\snow\shis\swatch\sis\sended"
    return token
def t_ID(token):
    # Generic identifier; must remain defined after the keyword rules.
    r"[a-zA-Z][_a-zA-Z0-9]*"
    return token
t_ignore = " \t"  # inter-token whitespace, skipped silently
def t_NEWLINE(token):
    r"\n+"
    # Track line numbers; returning nothing discards the token.
    token.lexer.lineno += len(token.value)
def t_IGNORE_COMMENTS(token):
    r"//(.*)\n"
    # Discard // comments but still count their trailing newline.
    token.lexer.lineno += 1
def t_error(token):
    """Abort lexing, reporting the line number and the offending fragment.

    Only the first line of the remaining input is included to keep the
    message short.
    """
    # Fix: corrected the "Sintax" typo in the user-facing message.
    raise Exception("Syntax error: Unknown token on line {0}. \"{1}\"".format(token.lineno, token.value.partition("\n")[0]))
|
import pyqtgraph as pg
from pyqtgraph.Qt import QtGui, QtCore
import numpy#
from pyqtgraph.parametertree import Parameter, ParameterTree, ParameterItem, registerParameterType
class FeatureSelectionDialog(QtGui.QDialog):
    """Dialog exposing a pyqtgraph parameter tree of feature-computation
    options; the tree is handed to the viewer when Ok is pressed."""

    def __init__(self, viewer, parent):
        super(FeatureSelectionDialog, self).__init__(parent)
        self.resize(800, 600)
        self.viewer = viewer
        self.layout = QtGui.QVBoxLayout()
        self.setLayout(self.layout)
        self.buttonBox = QtGui.QDialogButtonBox(QtGui.QDialogButtonBox.Ok|QtGui.QDialogButtonBox.Cancel)
        self.buttonBox.accepted.connect(self.onPressAccepted)

        def makeCheckBox(name, val=True):
            # Parameter-spec dict for a boolean tree entry.
            return {
                'name': name,
                'type': 'bool',
                'value': val,
            }

        sigmaOpt = {'name': 'sigma', 'type': 'str', 'value': '[0.0, 1.0, 2.0, 4.0]' }
        wardOpts = {'name': 'wardness', 'type': 'str', 'value': '[0.0, 0.1, 0.2]' }
        # NOTE(review): this single spec list is shared by all three filter
        # groups below — confirm pyqtgraph copies child specs rather than
        # mutating them in place.
        filterChild = [
            makeCheckBox("computeFilter"),
            sigmaOpt,
            {
                'name':'UCM',
                'children': [
                    makeCheckBox("ucmFilters"),
                    wardOpts,
                    {'name': 'meanSign', 'type': 'float', 'value': '1.0' }
                ]
            }
        ]
        params = [
            {
                'name' : "RawData",
                'type' : 'group',
                'children' : [
                    {
                        'name': 'Compute Features On Raw Data',
                        'type': 'bool',
                        'value': True,
                        'tip': "This is a checkbox",
                    },
                    {
                        'name' : "0-Order Filter",
                        'type' : 'group',
                        'children' : filterChild
                    },
                    {
                        'name' : "1-Order Filter",
                        'type' : 'group',
                        'children' : filterChild
                    },
                    {
                        'name' : "2-Order Filter",
                        'type' : 'group',
                        'children' : filterChild
                    }
                ]
            },
        ]
        ## Create tree of Parameter objects
        self.p = Parameter.create(name='params', type='group', children=params)
        self.t = ParameterTree()
        self.t.setParameters(self.p, showTop=False)
        self.layout.addWidget(self.t)
        self.layout.addWidget(self.buttonBox)

        ## If anything changes in the tree, print a message
        def change(param, changes):
            print("tree changes:")
            for param, change, data in changes:
                path = self.p.childPath(param)
                if path is not None:
                    childName = '.'.join(path)
                else:
                    childName = param.name()
                print(' parameter: %s'% childName)
                print(' change: %s'% change)
                print(' data: %s'% str(data))
                print(' ----------')
        self.p.sigTreeStateChanged.connect(change)

    def onPressAccepted(self):
        """Ok pressed: hide the dialog and trigger feature computation."""
        self.hide()
        self.viewer.onClickedComputeFeaturesImpl(self.p)

    def keyPressEvent(self, event):
        """Hide on Escape; otherwise defer to the default dialog handling.

        Fix: the original called super(QtGui.QDialog, self), which starts
        the MRO search *after* QDialog and therefore skips QDialog's own
        key handling (e.g. default-button Return behaviour).
        """
        if event.key() == QtCore.Qt.Key_Escape:
            self.hide()
            event.accept()
        else:
            super(FeatureSelectionDialog, self).keyPressEvent(event)
|
from engine.api import API
from engine.utils.printing_utils import progressBar
from setup.utils.datastore_utils import repair_corrupt_reference, link_references_to_paper
def remove_duplicates_from_cited_by():
    """Deduplicate every paper's cited_by list (order kept) and persist."""
    print("\nRemove Duplicates")
    api = API()
    papers = api.get_all_paper()
    total = len(papers)
    for index, paper in enumerate(papers):
        progressBar(index, total)
        # dict.fromkeys preserves first-seen order while dropping repeats.
        paper.cited_by = list(dict.fromkeys(paper.cited_by))
        api.client.update_paper(paper)
def check_references():
    """Verify that each reference's target cites the paper back; repair
    corrupt references otherwise."""
    print("\nCheck References")
    api = API()
    papers = api.get_all_paper()
    for i, paper in enumerate(papers):
        progressBar(i, len(papers))
        other_papers = [p for p in papers if p.id != paper.id]
        for reference in paper.references:
            if not reference.get_paper_id():
                continue
            ref_paper = api.get_paper(reference.get_paper_id())
            # Missing back-link: clear the link and attempt a repair.
            if paper.id not in ref_paper.cited_by:
                print()
                reference.paper_id = []
                api.client.update_paper(paper)
                repair_corrupt_reference(reference, paper, other_papers, api)
def check_cited_by():
    """Drop stale cited_by entries and re-link references where missing.

    Fix: the original removed ids from `paper.cited_by` while iterating
    that same list, which makes Python skip the element following each
    removal. Iterating over a snapshot (`list(...)`) makes every entry
    get inspected.
    """
    print("\nCheck Cited by")
    api = API()
    papers = api.get_all_paper()
    for i, paper in enumerate(papers):
        progressBar(i, len(papers))
        for cited_paper_id in list(paper.cited_by):
            # Citing paper no longer exists: drop the entry.
            if not api.contains_paper(cited_paper_id):
                paper.cited_by.remove(cited_paper_id)
                api.client.update_paper(paper)
                continue
            cited_paper = api.get_paper(cited_paper_id)
            cited_paper_refs = [ref.get_paper_id() for ref in cited_paper.references if ref.get_paper_id()]
            # Citing paper does not actually reference this one: drop the
            # entry and try to re-establish the reference link.
            if cited_paper_refs.count(paper.id) == 0:
                print()
                paper.cited_by.remove(cited_paper_id)
                api.client.update_paper(paper)
                link_references_to_paper(cited_paper, paper, api)
def perform_checks():
    """Run every datastore consistency pass, in dependency order."""
    for check in (check_cited_by, remove_duplicates_from_cited_by, check_references):
        check()
if __name__ == "__main__":
    # Run all consistency checks, then terminate with a success status.
    # NOTE(review): `exit` is the site-module helper; sys.exit(0) is the
    # conventional form for scripts — confirm before changing.
    perform_checks()
    exit(0)
|
from polyphony import testbench
# Polyphony HLS regression fixture: the specific if/elif/else and loop
# shapes below are the subject under test — do not refactor them.
def g(x):
    # Indicator: 0 for zero input, 1 for anything else.
    if x == 0:
        return 0
    return 1
def h(x):
    # Deliberate no-op branch; always (implicitly) returns None.
    if x == 0:
        pass
def f(v, i, j, k):
    # Dispatch on `i`, exercising early returns, a call in a branch,
    # and loops inside elif/else arms.
    if i == 0:
        return v
    elif i == 1:
        return v
    elif i == 2:
        # Result of h() is unused; the call exists to exercise the branch.
        h(g(j) + g(k))
        return v
    elif i == 3:
        # Adds 2*j to v.
        for m in range(j):
            v += 2
        return v
    else:
        # Adds i to v (i > 3 here).
        for n in range(i):
            v += 1
        return v
def if28(code, r1, r2, r3, r4):
    # Entry point: only code == 0 dispatches to f; anything else yields 0.
    if code == 0:
        return f(r1, r2, r3, r4)
    return 0
@testbench
def test():
    assert 1 == if28(0, 1, 1, 0, 0)
    assert 2 == if28(0, 2, 0, 0, 0)
    assert 3 == if28(0, 3, 1, 0, 0)
    assert 4 == if28(0, 4, 2, 0, 0)
    assert 5 == if28(0, 5, 2, 1, 1)
    assert 6 == if28(0, 6, 2, 2, 2)
    assert 7 == if28(0, 7, 3, 0, 0)
    assert 10 == if28(0, 8, 3, 1, 1)
    assert 13 == if28(0, 9, 3, 2, 2)
    assert 14 == if28(0, 10, 4, 0, 0)
test()
|
import os
import sys
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
sys.path.append(BASE_DIR)
sys.path.append(os.path.dirname(BASE_DIR))
from global_variables import *
from evaluation_helper import *
# Evaluation driver. All g_* names come from global_variables (project
# config); helpers come from evaluation_helper.
cls_names = g_shape_names
# Per-class image-list files and detection .mat files for the AVP test.
img_name_file_list = [os.path.join(g_real_images_voc12val_det_bbox_folder, name+'.txt') for name in cls_names]
det_bbox_mat_file_list = [os.path.join(g_detection_results_folder, x.rstrip()) for x in open(g_rcnn_detection_bbox_mat_filelist)]
# Joint detection + viewpoint (AVP) evaluation on detected boxes.
result_folder = os.path.join(BASE_DIR, 'avp_test_results')
test_avp_nv(cls_names, img_name_file_list, det_bbox_mat_file_list, result_folder)
# Viewpoint-accuracy evaluation on easy ground-truth boxes.
img_name_file_list = [os.path.join(g_real_images_voc12val_easy_gt_bbox_folder, name+'.txt') for name in cls_names]
view_label_folder = g_real_images_voc12val_easy_gt_bbox_folder
result_folder = os.path.join(BASE_DIR, 'vp_test_results')
test_vp_acc(cls_names, img_name_file_list, result_folder, view_label_folder)
|
from boto.swf.exceptions import SWFResponseError
from swf.constants import REGISTERED
from swf.querysets.base import BaseQuerySet
from swf.models import Domain
from swf.models.workflow import (WorkflowType, WorkflowExecution,
CHILD_POLICIES)
from swf.utils import datetime_timestamp, past_day, get_subkey
from swf.exceptions import (ResponseError, DoesNotExistError,
InvalidKeywordArgumentError, AlreadyExistsError)
class BaseWorkflowQuerySet(BaseQuerySet):
    """Base domain bounded workflow queryset objects

    Amazon workflows types and executions are always bounded
    to a specific domain: so any queryset which means to deal
    with workflows has to be built against a `domain`

    :param domain: domain the inheriting queryset belongs to
    :type domain: swf.model.domain.Domain
    """

    # Amazon response section corresponding
    # to current queryset informations
    _infos = 'typeInfo'
    _infos_plural = 'typeInfos'

    def __init__(self, domain, *args, **kwargs):
        super(BaseWorkflowQuerySet, self).__init__(*args, **kwargs)
        Domain.check(domain)
        self.domain = domain  # routed through the validating setter below

    @property
    def domain(self):
        # Lazily default to None so a read before assignment is safe.
        if not hasattr(self, '_domain'):
            self._domain = None
        return self._domain

    @domain.setter
    def domain(self, value):
        # Avoiding circular import
        from swf.models.domain import Domain
        if not isinstance(value, Domain):
            # Fix: the original string concatenation was missing a space,
            # producing "...has to be ofswf.model.domain.Domain...".
            err = "domain property has to be of "\
                  "swf.model.domain.Domain type, not %r"\
                  % type(value)
            raise TypeError(err)
        self._domain = value

    def _list(self, *args, **kwargs):
        """Raw listing call; must be implemented by subclasses."""
        raise NotImplementedError

    def _list_items(self, *args, **kwargs):
        """Yield every item across all paginated response pages.

        The first request goes out with next_page_token=None; iteration
        ends as soon as a response lacks 'nextPageToken'.
        """
        response = {'nextPageToken': None}
        while 'nextPageToken' in response:
            response = self._list(
                *args,
                next_page_token=response['nextPageToken'],
                **kwargs
            )
            for item in response[self._infos_plural]:
                yield item
class WorkflowTypeQuerySet(BaseWorkflowQuerySet):
    """Queryset over the workflow types registered in a domain."""

    # Explicit is better than implicit, keep zen
    _infos = 'typeInfo'
    _infos_plural = 'typeInfos'

    def to_WorkflowType(self, domain, workflow_info, **kwargs):
        """Build a WorkflowType model from an Amazon typeInfo section."""
        # Not using get_subkey in order for it to explicitly
        # raise when workflowType name doesn't exist for example
        return WorkflowType(
            domain,
            workflow_info['workflowType']['name'],
            workflow_info['workflowType']['version'],
            status=workflow_info['status'],
            **kwargs
        )

    def get(self, name, version, *args, **kwargs):
        """Fetches the Workflow Type with `name` and `version`

        :param name: name of the workflow type
        :type name: String

        :param version: workflow type version
        :type version: String

        :returns: matched workflow type instance
        :rtype: swf.core.model.workflow.WorkflowType

        A typical Amazon response looks like:

        .. code-block:: json

            {
                "configuration": {
                    "defaultExecutionStartToCloseTimeout": "300",
                    "defaultTaskStartToCloseTimeout": "300",
                    "defaultTaskList": {
                        "name": "None"
                    },
                    "defaultChildPolicy": "TERMINATE"
                },
                "typeInfo": {
                    "status": "REGISTERED",
                    "creationDate": 1364492094.968,
                    "workflowType": {
                        "version": "1",
                        "name": "testW"
                    }
                }
            }
        """
        try:
            response = self.connection.describe_workflow_type(self.domain.name, name, version)
        except SWFResponseError as e:
            if e.error_code == 'UnknownResourceFault':
                raise DoesNotExistError(e.body['message'])
            raise ResponseError(e.body['message'])

        wt_info = response[self._infos]
        wt_config = response['configuration']

        # Каждый kwarg falls back to the type's registered default value.
        task_list = kwargs.get('task_list')
        if task_list is None:
            task_list = get_subkey(wt_config, ['defaultTaskList', 'name'])

        child_policy = kwargs.get('child_policy')
        if child_policy is None:
            child_policy = wt_config.get('defaultChildPolicy')

        # Fix: removed a dead local (`decision_task_timeout`, singular)
        # that duplicated `decision_tasks_timeout` below but was never used.
        execution_timeout = kwargs.get('execution_timeout')
        if execution_timeout is None:
            execution_timeout = wt_config.get(
                'defaultExecutionStartToCloseTimeout')

        decision_tasks_timeout = kwargs.get('decision_tasks_timeout')
        if decision_tasks_timeout is None:
            decision_tasks_timeout = wt_config.get(
                'defaultTaskStartToCloseTimeout')

        return self.to_WorkflowType(
            self.domain,
            wt_info,
            task_list=task_list,
            child_policy=child_policy,
            execution_timeout=execution_timeout,
            decision_tasks_timeout=decision_tasks_timeout,
        )

    def get_or_create(self, name, version,
                      status=REGISTERED,
                      creation_date=0.0,
                      deprecation_date=0.0,
                      task_list=None,
                      child_policy=CHILD_POLICIES.TERMINATE,
                      execution_timeout='300',
                      decision_tasks_timeout='300',
                      description=None,
                      *args, **kwargs):
        """Fetches, or creates the ActivityType with ``name`` and ``version``

        When fetching trying to fetch a matching workflow type, only
        name and version parameters are taken in account.
        Anyway, If you'd wanna make sure that in case the workflow type
        has to be created it is made with specific values, just provide it.

        :param name: name of the workflow type
        :type name: String

        :param version: workflow type version
        :type version: String

        :param status: workflow type status
        :type status: swf.core.ConnectedSWFObject.{REGISTERED, DEPRECATED}

        :param creation_date: creation date of the current WorkflowType
        :type creation_date: float (timestamp)

        :param deprecation_date: deprecation date of WorkflowType
        :type deprecation_date: float (timestamp)

        :param task_list: task list to use for scheduling decision tasks for executions
                          of this workflow type
        :type task_list: String

        :param child_policy: policy to use for the child workflow executions
                             when a workflow execution of this type is terminated
        :type child_policy: CHILD_POLICIES.{TERMINATE | REQUEST_CANCEL | ABANDON}

        :param execution_timeout: maximum duration for executions of this workflow type
        :type execution_timeout: String

        :param decision_tasks_timeout: maximum duration of decision tasks for this workflow type
        :type decision_tasks_timeout: String

        :param description: Textual description of the workflow type
        :type description: String

        :returns: Fetched or created WorkflowType model object
        :rtype: WorkflowType
        """
        try:
            return self.get(name,
                            version,
                            task_list=task_list,
                            child_policy=child_policy,
                            execution_timeout=execution_timeout,
                            decision_tasks_timeout=decision_tasks_timeout)
        except DoesNotExistError:
            try:
                return self.create(
                    name,
                    version,
                    status=status,
                    creation_date=creation_date,
                    deprecation_date=deprecation_date,
                    task_list=task_list,
                    child_policy=child_policy,
                    execution_timeout=execution_timeout,
                    decision_tasks_timeout=decision_tasks_timeout,
                    description=description,
                )
            # race conditon could happen if two workflows trying to register the same type
            except AlreadyExistsError:
                return self.get(name,
                                version,
                                task_list=task_list,
                                child_policy=child_policy,
                                execution_timeout=execution_timeout,
                                decision_tasks_timeout=decision_tasks_timeout)

    def _list(self, *args, **kwargs):
        """Raw boto listing call used by the paginating _list_items."""
        return self.connection.list_workflow_types(*args, **kwargs)

    def filter(self, domain=None,
               registration_status=REGISTERED,
               name=None,
               *args, **kwargs):
        """Filters workflows based on the ``domain`` they belong to,
        their ``status``, and/or their ``name``

        :param domain: domain the workflow type belongs to
        :type domain: swf.models.domain.Domain

        :param registration_status: workflow type registration status to match,
                                    Valid values are:
                                    * ``swf.constants.REGISTERED``
                                    * ``swf.constants.DEPRECATED``
        :type registration_status: string

        :param name: workflow type name to match
        :type name: string

        :returns: list of matched WorkflowType models objects
        :rtype: list
        """
        # As WorkflowTypeQuery has to be built against a specific domain
        # name, domain filter is disposable, but not mandatory.
        domain = domain or self.domain
        return [self.to_WorkflowType(domain, wf) for wf in
                self._list_items(domain.name, registration_status, name=name)]

    def all(self, registration_status=REGISTERED, *args, **kwargs):
        """Retrieves every Workflow types

        :param registration_status: workflow type registration status to match,
                                    Valid values are:
                                    * ``swf.constants.REGISTERED``
                                    * ``swf.constants.DEPRECATED``
        :type registration_status: string

        A typical Amazon response looks like:

        .. code-block:: json

            {
                "typeInfos": [
                    {
                        "status": "REGISTERED",
                        "creationDate": 1364293450.67,
                        "description": "",
                        "workflowType": {
                            "version": "1",
                            "name": "Crawl"
                        }
                    },
                    {
                        "status": "REGISTERED",
                        "creationDate": 1364492094.968,
                        "workflowType": {
                            "version": "1",
                            "name": "testW"
                        }
                    }
                ]
            }
        """
        return self.filter(registration_status=registration_status)

    def create(self, name, version,
               status=REGISTERED,
               creation_date=0.0,
               deprecation_date=0.0,
               task_list=None,
               child_policy=CHILD_POLICIES.TERMINATE,
               execution_timeout='300',
               decision_tasks_timeout='300',
               description=None,
               *args, **kwargs):
        """Creates a new remote workflow type and returns the
        created WorkflowType model instance.

        :param name: name of the workflow type
        :type name: String

        :param version: workflow type version
        :type version: String

        :param status: workflow type status
        :type status: swf.core.ConnectedSWFObject.{REGISTERED, DEPRECATED}

        :param creation_date: creation date of the current WorkflowType
        :type creation_date: float (timestamp)

        :param deprecation_date: deprecation date of WorkflowType
        :type deprecation_date: float (timestamp)

        :param task_list: task list to use for scheduling decision tasks for executions
                          of this workflow type
        :type task_list: String

        :param child_policy: policy to use for the child workflow executions
                             when a workflow execution of this type is terminated
        :type child_policy: CHILD_POLICIES.{TERMINATE | REQUEST_CANCEL | ABANDON}

        :param execution_timeout: maximum duration for executions of this workflow type
        :type execution_timeout: String

        :param decision_tasks_timeout: maximum duration of decision tasks for this workflow type
        :type decision_tasks_timeout: String

        :param description: Textual description of the workflow type
        :type description: String
        """
        workflow_type = WorkflowType(
            self.domain,
            name,
            version,
            status=status,
            creation_date=creation_date,
            deprecation_date=deprecation_date,
            task_list=task_list,
            child_policy=child_policy,
            execution_timeout=execution_timeout,
            decision_tasks_timeout=decision_tasks_timeout,
            description=description
        )
        workflow_type.save()

        return workflow_type
class WorkflowExecutionQuerySet(BaseWorkflowQuerySet):
"""Fetches Workflow executions"""
_infos = 'executionInfo'
_infos_plural = 'executionInfos'
def _is_valid_status_param(self, status, param):
statuses = {
WorkflowExecution.STATUS_OPEN: set([
'oldest_date',
'latest_date'],
),
WorkflowExecution.STATUS_CLOSED: set([
'start_latest_date',
'start_oldest_date',
'close_latest_date',
'close_oldest_date',
'close_status'
]),
}
return param in statuses.get(status, set())
def _validate_status_parameters(self, status, params):
return [param for param in params if
not self._is_valid_status_param(status, param)]
    def list_workflow_executions(self, status, *args, **kwargs):
        """Dispatch to boto's list_open_/list_closed_workflow_executions
        depending on `status`; unknown statuses raise ValueError."""
        statuses = {
            WorkflowExecution.STATUS_OPEN: 'open',
            WorkflowExecution.STATUS_CLOSED: 'closed',
        }

        # boto.swf.list_closed_workflow_executions awaits a `start_oldest_date`
        # MANDATORY kwarg, when boto.swf.list_open_workflow_executions awaits a
        # `oldest_date` mandatory arg.
        if status == WorkflowExecution.STATUS_OPEN:
            # NOTE(review): raises KeyError if the caller omitted
            # 'start_oldest_date' — confirm all call sites supply it.
            kwargs['oldest_date'] = kwargs.pop('start_oldest_date')
        try:
            # statuses[status] raises KeyError for unknown statuses, which
            # the except below converts into a ValueError.
            method = 'list_{}_workflow_executions'.format(statuses[status])
            return getattr(self.connection, method)(*args, **kwargs)
        except KeyError:
            raise ValueError("Unknown status provided: %s" % status)
def get_workflow_type(self, execution_info):
workflow_type = execution_info['workflowType']
workflow_type_qs = WorkflowTypeQuerySet(self.domain)
return workflow_type_qs.get(
workflow_type['name'],
workflow_type['version'],
)
    def to_WorkflowExecution(self, domain, execution_info, **kwargs):
        """Build a WorkflowExecution model from an Amazon executionInfo
        section; extra kwargs are forwarded to the model constructor."""
        # NOTE(review): the type is built against self.domain while the
        # execution itself uses the `domain` argument — confirm whether
        # these are always the same object at the call sites.
        workflow_type = WorkflowType(
            self.domain,
            execution_info['workflowType']['name'],
            execution_info['workflowType']['version']
        )

        return WorkflowExecution(
            domain,
            get_subkey(execution_info, ['execution', 'workflowId']),  # workflow_id
            run_id=get_subkey(execution_info, ['execution', 'runId']),
            workflow_type=workflow_type,
            # Optional sections: .get() yields None when Amazon omits them.
            status=execution_info.get('executionStatus'),
            close_status=execution_info.get('closeStatus'),
            tag_list=execution_info.get('tagList'),
            start_timestamp=execution_info.get('startTimestamp'),
            close_timestamp=execution_info.get('closeTimestamp'),
            cancel_requested=execution_info.get('cancelRequested'),
            parent=execution_info.get('parent'),
            **kwargs
        )
    def get(self, workflow_id, run_id, *args, **kwargs):
        """Fetch a single workflow execution by ``workflow_id`` and ``run_id``.

        :raises DoesNotExistError: when SWF reports an UnknownResourceFault
        :raises ResponseError: for any other SWF response error
        """
        try:
            response = self.connection.describe_workflow_execution(
                self.domain.name,
                run_id,
                workflow_id)
        except SWFResponseError as e:
            # Map the boto error onto this library's exception hierarchy.
            if e.error_code == 'UnknownResourceFault':
                raise DoesNotExistError(e.body['message'])
            raise ResponseError(e.body['message'])
        execution_info = response[self._infos]
        execution_config = response['executionConfiguration']
        # Flatten the separate response sections into model attributes.
        return self.to_WorkflowExecution(
            self.domain,
            execution_info,
            task_list=get_subkey(execution_config, ['taskList', 'name']),
            child_policy=execution_config.get('childPolicy'),
            execution_timeout=execution_config.get('executionStartToCloseTimeout'),
            decision_tasks_timeout=execution_config.get('taskStartToCloseTimeout'),
            latest_activity_task_timestamp=response.get('latestActivityTaskTimestamp'),
            latest_execution_context=response.get('latestExecutionContext'),
            open_counts=response['openCounts'],
        )
    def filter(self,
               status=WorkflowExecution.STATUS_OPEN, tag=None,
               workflow_id=None, workflow_type_name=None,
               workflow_type_version=None,
               *args, **kwargs):
        """Filters workflow executions based on kwargs provided criteria.

        :param status: workflow executions with provided status will be kept.
            Valid values are:
            * ``swf.models.WorkflowExecution.STATUS_OPEN``
            * ``swf.models.WorkflowExecution.STATUS_CLOSED``
        :type status: string

        :param tag: workflow executions containing the tag will be kept
        :type tag: String

        :param workflow_id: workflow executions attached to the id will be kept
        :type workflow_id: String

        :param workflow_type_name: workflow executions attached to the workflow type
            with provided name will be kept
        :type workflow_type_name: String

        :param workflow_type_version: workflow executions attached to the workflow type
            of the provided version will be kept
        :type workflow_type_version: String

        **Be aware that** querying over status allows the usage of statuses specific
        kwargs

        * STATUS_OPEN
            :param start_latest_date: latest start or close date and time to return (in days)
            :type start_latest_date: int

        * STATUS_CLOSED
            :param start_latest_date: workflow executions that meet the start time criteria
                of the filter are kept (in days)
            :type start_latest_date: int

            :param start_oldest_date: workflow executions that meet the start time criteria
                of the filter are kept (in days)
            :type start_oldest_date: int

            :param close_latest_date: workflow executions that meet the close time criteria
                of the filter are kept (in days)
            :type close_latest_date: int

            :param close_oldest_date: workflow executions that meet the close time criteria
                of the filter are kept (in days)
            :type close_oldest_date: int

            :param close_status: must match the close status of an execution for it
                to meet the criteria of this filter.
                Valid values are:
                * ``CLOSE_STATUS_COMPLETED``
                * ``CLOSE_STATUS_FAILED``
                * ``CLOSE_STATUS_CANCELED``
                * ``CLOSE_STATUS_TERMINATED``
                * ``CLOSE_STATUS_CONTINUED_AS_NEW``
                * ``CLOSE_TIMED_OUT``
            :type close_status: string

        :returns: workflow executions objects list
        :rtype: list
        """
        # As WorkflowTypeQuery has to be built against a specific domain
        # name, domain filter is disposable, but not mandatory.
        # Reject kwargs that do not apply to the requested status early.
        invalid_kwargs = self._validate_status_parameters(status, kwargs)
        if invalid_kwargs:
            err_msg = 'Invalid keyword arguments supplied: {}'.format(
                ', '.join(invalid_kwargs))
            raise InvalidKeywordArgumentError(err_msg)
        if status == WorkflowExecution.STATUS_OPEN:
            oldest_date = kwargs.pop('oldest_date', 30)
        else:
            # The SWF docs on ListClosedWorkflowExecutions state that:
            #
            # "startTimeFilter and closeTimeFilter are mutually exclusive"
            #
            # so we must figure out if we have to add a default value for
            # start_oldest_date or not.
            if "close_latest_date" in kwargs or "close_oldest_date" in kwargs:
                default_oldest_date = None
            else:
                default_oldest_date = 30
            oldest_date = kwargs.pop('start_oldest_date', default_oldest_date)
        # Compute a timestamp from the delta in days we got from params
        # If oldest_date is blank at this point, it's because we didn't want
        # it, so let's leave it blank and assume the user provided an other
        # time filter.
        if oldest_date:
            start_oldest_date = int(datetime_timestamp(past_day(oldest_date)))
        else:
            start_oldest_date = None
        return [self.to_WorkflowExecution(self.domain, wfe) for wfe in
                self._list_items(
                    *args,
                    domain=self.domain.name,
                    status=status,
                    workflow_id=workflow_id,
                    workflow_name=workflow_type_name,
                    workflow_version=workflow_type_version,
                    start_oldest_date=start_oldest_date,
                    tag=tag,
                    **kwargs
                )]
    def _list(self, *args, **kwargs):
        # Hook for the generic listing machinery (presumably consumed by
        # `_list_items` in the base queryset -- confirm there); simply
        # delegates to list_workflow_executions().
        return self.list_workflow_executions(*args, **kwargs)
    def all(self, status=WorkflowExecution.STATUS_OPEN,
            start_oldest_date=30,
            *args, **kwargs):
        """Fetch every workflow executions during the last `start_oldest_date`
        days, with `status`.

        :param status: Workflow executions status filter
        :type status: swf.models.WorkflowExecution.{STATUS_OPEN, STATUS_CLOSED}

        :param start_oldest_date: Specifies the oldest start/close date to return.
        :type start_oldest_date: integer (days)

        :returns: workflow executions objects list
        :rtype: list

        A typical amazon response looks like:

        .. code-block:: json

            {
                "executionInfos": [
                    {
                        "cancelRequested": "boolean",
                        "closeStatus": "string",
                        "closeTimestamp": "number",
                        "execution": {
                            "runId": "string",
                            "workflowId": "string"
                        },
                        "executionStatus": "string",
                        "parent": {
                            "runId": "string",
                            "workflowId": "string"
                        },
                        "startTimestamp": "number",
                        "tagList": [
                            "string"
                        ],
                        "workflowType": {
                            "name": "string",
                            "version": "string"
                        }
                    }
                ],
                "nextPageToken": "string"
            }
        """
        # Convert the day delta into the epoch timestamp SWF expects.
        start_oldest_date = datetime_timestamp(past_day(start_oldest_date))
        return [self.to_WorkflowExecution(self.domain, wfe) for wfe
                in self._list_items(
                    status,
                    self.domain.name,
                    start_oldest_date=int(start_oldest_date))]
|
import tensorflow as tf
from ocnn import *
def network_resnet(octree, flags, training=True, reuse=None):
  """ResNet-style octree classification network (TF1 graph mode).

  :param octree: input octree structure
  :param flags: config object; reads depth, channel, resblock_num, dropout, nout
  :param training: enables batch-norm updates and dropout
  :param reuse: forwarded to tf.variable_scope for variable sharing
  :return: unnormalized class logits
  """
  depth = flags.depth
  # Channel count per octree depth level (indexed by depth).
  channels = [2048, 1024, 512, 256, 128, 64, 32, 16, 8]
  with tf.variable_scope("ocnn_resnet", reuse=reuse):
    data = octree_property(octree, property_name="feature", dtype=tf.float32,
                           depth=depth, channel=flags.channel)
    data = tf.reshape(data, [1, flags.channel, -1, 1])
    with tf.variable_scope("conv1"):
      data = octree_conv_bn_relu(data, octree, depth, channels[depth], training)
    # Coarsen from `depth` down to level 3: resblocks, then max-pool one level.
    for d in range(depth, 2, -1):
      for i in range(0, flags.resblock_num):
        with tf.variable_scope('resblock_%d_%d' % (d, i)):
          data = octree_resblock(data, octree, d, channels[d], 1, training)
      with tf.variable_scope('max_pool_%d' % d):
        data, _ = octree_max_pool(data, octree, d)
    # Global average pooling over the remaining voxels, then classify.
    with tf.variable_scope("global_average"):
      data = octree_full_voxel(data, depth=2)
      data = tf.reduce_mean(data, 2)
      if flags.dropout[0]:
        data = tf.layers.dropout(data, rate=0.5, training=training)
    with tf.variable_scope("fc2"):
      logit = dense(data, flags.nout, use_bias=True)
  return logit
def network_ocnn(octree, flags, training=True, reuse=None):
  """Plain O-CNN classification network (conv + pool per level, two FC layers).

  :param octree: input octree structure
  :param flags: config object; reads depth, channel, nout
  :param training: enables batch-norm updates and dropout
  :param reuse: forwarded to tf.variable_scope for variable sharing
  :return: unnormalized class logits
  """
  depth = flags.depth
  # Channel count per octree depth level (indexed by depth).
  channels = [512, 256, 128, 64, 32, 16, 8, 4, 2]
  with tf.variable_scope("ocnn", reuse=reuse):
    data = octree_property(octree, property_name="feature", dtype=tf.float32,
                           depth=depth, channel=flags.channel)
    data = tf.reshape(data, [1, flags.channel, -1, 1])
    # One conv + max-pool per level, coarsening down to level 3.
    for d in range(depth, 2, -1):
      with tf.variable_scope('depth_%d' % d):
        data = octree_conv_bn_relu(data, octree, d, channels[d], training)
        data, _ = octree_max_pool(data, octree, d)
    with tf.variable_scope("full_voxel"):
      data = octree_full_voxel(data, depth=2)
      data = tf.layers.dropout(data, rate=0.5, training=training)
    with tf.variable_scope("fc1"):
      data = fc_bn_relu(data, channels[2], training=training)
      data = tf.layers.dropout(data, rate=0.5, training=training)
    with tf.variable_scope("fc2"):
      logit = dense(data, flags.nout, use_bias=True)
  return logit
def cls_network(octree, flags, training, reuse=False):
  """Dispatch to the classification network selected by ``flags.name``.

  :param octree: input octree structure
  :param flags: config object; ``flags.name`` selects 'ocnn' or 'resnet'
  :param training: forwarded to the selected network
  :param reuse: forwarded to the selected network
  :raises ValueError: when ``flags.name`` names no known network
  """
  name = flags.name.lower()
  if name == 'ocnn':
    return network_ocnn(octree, flags, training, reuse)
  elif name == 'resnet':
    return network_resnet(octree, flags, training, reuse)
  else:
    # Previously this only printed an error and fell through, silently
    # returning None; fail fast instead so the caller sees the bad config.
    raise ValueError('Error, no network: ' + flags.name)
|
# Packaging script for the danga-gearman distribution.
# NOTE(review): distutils is deprecated (removed in Python 3.12); migrating to
# setuptools would be the modern equivalent -- confirm before changing.
from distutils.core import setup
from dangagearman import __version__ as version
setup(
    name = 'danga-gearman',
    version = version,
    description = 'Client for the Danga (Perl) Gearman implementation',
    author = 'Samuel Stauffer',
    author_email = 'samuel@descolada.com',
    url = 'http://github.com/saymedia/python-danga-gearman/tree/master',
    packages = ['dangagearman'],
    classifiers = [
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
)
|
from __future__ import print_function
import sys
import re
from utils import CDNEngine
from utils import request
if sys.version_info >= (3, 0):
import subprocess as commands
import urllib.parse as urlparse
else:
import commands
import urlparse
def detect(hostname):
    """
    Performs CDN detection thanks to information disclosure from server error.

    Parameters
    ----------
    hostname : str
        Hostname to assess
    """
    print('[+] Error server detection\n')
    hostname = urlparse.urlparse(hostname).netloc
    # Raw string: the old pattern spelled '\d' inside a normal string, an
    # invalid escape sequence (DeprecationWarning since 3.6, later an error).
    regexp = re.compile(r'\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b')
    # Resolve the host via the system `host` command and probe every IPv4
    # address found in its output.
    out = commands.getoutput("host " + hostname)
    addresses = regexp.finditer(out)
    for addr in addresses:
        res = request.do('http://' + addr.group())
        # A 500 response may leak the CDN's signature in the error body.
        if res is not None and res.status_code == 500:
            CDNEngine.find(res.text.lower())
|
"""
=================================================
Modeling quasi-seasonal trends with date features
=================================================
Some trends are common enough to appear seasonal, yet sporadic enough that
approaching them from a seasonal perspective may not be valid. An example of
this is the `"end-of-the-month" effect <https://robjhyndman.com/hyndsight/monthly-seasonality/>`_.
In this example, we'll explore how we can create meaningful features that
express seasonal trends without needing to fit a seasonal model.
.. raw:: html
<br/>
"""
print(__doc__)
import pmdarima as pm
from pmdarima import arima
from pmdarima import model_selection
from pmdarima import pipeline
from pmdarima import preprocessing
from pmdarima.datasets._base import load_date_example
import numpy as np
from matplotlib import pyplot as plt
print(f"pmdarima version: {pm.__version__}")
y, X = load_date_example()
y_train, y_test, X_train, X_test = \
model_selection.train_test_split(y, X, test_size=20)
pm.tsdisplay(y_train, lag_max=10)
n_diffs = arima.ndiffs(y_train, max_d=5)
date_feat = preprocessing.DateFeaturizer(
column_name="date", # the name of the date feature in the X matrix
with_day_of_week=True,
with_day_of_month=True)
_, X_train_feats = date_feat.fit_transform(y_train, X_train)
print(f"Head of generated X features:\n{repr(X_train_feats.head())}")
pipe = pipeline.Pipeline([
('date', date_feat),
('arima', arima.AutoARIMA(d=n_diffs,
trace=3,
stepwise=True,
suppress_warnings=True,
seasonal=False))
])
pipe.fit(y_train, X_train)
forecasts = pipe.predict(X=X_test)
fig = plt.figure(figsize=(16, 8))
ax = fig.add_subplot(1, 1, 1)
n_train = y_train.shape[0]
x = np.arange(n_train + forecasts.shape[0])
ax.plot(x[:n_train], y_train, color='blue', label='Training Data')
ax.plot(x[n_train:], forecasts, color='green', marker='o',
label='Predicted')
ax.plot(x[n_train:], y_test, color='red', label='Actual')
ax.legend(loc='lower left', borderaxespad=0.5)
ax.set_title('Predicted Foo')
ax.set_ylabel('# Foo')
plt.show()
|
import json
from chargebee.model import Model
from chargebee import request
from chargebee import APIError
class Plan(Model):
    """Chargebee ``plan`` resource together with its nested sub-resources."""

    class Tier(Model):
        # Tiered/volume pricing boundaries for the plan.
        fields = ["starting_unit", "ending_unit", "price", "starting_unit_in_decimal", "ending_unit_in_decimal", "price_in_decimal"]

    class ApplicableAddon(Model):
        fields = ["id"]

    class AttachedAddon(Model):
        fields = ["id", "quantity", "billing_cycles", "type", "quantity_in_decimal"]

    class EventBasedAddon(Model):
        fields = ["id", "quantity", "on_event", "charge_once", "quantity_in_decimal"]

    # Flat attribute list used by the Model base to deserialize API payloads.
    fields = [
        "id", "name", "invoice_name", "description", "price", "currency_code", "period",
        "period_unit", "trial_period", "trial_period_unit", "trial_end_action", "pricing_model", "charge_model",
        "free_quantity", "setup_cost", "downgrade_penalty", "status", "archived_at", "billing_cycles",
        "redirect_url", "enabled_in_hosted_pages", "enabled_in_portal", "addon_applicability", "tax_code",
        "hsn_code", "taxjar_product_code", "avalara_sale_type", "avalara_transaction_type", "avalara_service_type",
        "sku", "accounting_code", "accounting_category1", "accounting_category2", "accounting_category3",
        "accounting_category4", "is_shippable", "shipping_frequency_period", "shipping_frequency_period_unit",
        "resource_version", "updated_at", "giftable", "claim_url", "free_quantity_in_decimal", "price_in_decimal",
        "invoice_notes", "taxable", "tax_profile_id", "meta_data", "tiers", "applicable_addons", "attached_addons",
        "event_based_addons", "show_description_in_invoices", "show_description_in_quotes",
    ]

    @staticmethod
    def create(params, env=None, headers=None):
        """POST /plans -- create a new plan."""
        return request.send('post', request.uri_path("plans"), params, env, headers)

    @staticmethod
    def update(id, params=None, env=None, headers=None):
        """POST /plans/{id} -- update an existing plan."""
        return request.send('post', request.uri_path("plans", id), params, env, headers)

    @staticmethod
    def list(params=None, env=None, headers=None):
        """GET /plans -- paginated listing."""
        return request.send_list_request('get', request.uri_path("plans"), params, env, headers)

    @staticmethod
    def retrieve(id, env=None, headers=None):
        """GET /plans/{id} -- fetch a single plan."""
        return request.send('get', request.uri_path("plans", id), None, env, headers)

    @staticmethod
    def delete(id, env=None, headers=None):
        """POST /plans/{id}/delete."""
        return request.send('post', request.uri_path("plans", id, "delete"), None, env, headers)

    @staticmethod
    def copy(params, env=None, headers=None):
        """POST /plans/copy -- copy a plan between sites."""
        return request.send('post', request.uri_path("plans", "copy"), params, env, headers)

    @staticmethod
    def unarchive(id, env=None, headers=None):
        """POST /plans/{id}/unarchive."""
        return request.send('post', request.uri_path("plans", id, "unarchive"), None, env, headers)
|
import math
from service.fake_api_results import ALL_TITLES, OFFICIAL_COPY_RESULT, SELECTED_FULL_RESULTS
SEARCH_RESULTS_PER_PAGE = 20
def get_title(title_number):
    """Return the canned full result for ``title_number`` (None if absent)."""
    return SELECTED_FULL_RESULTS.get(title_number)
def _get_titles(page_number):
    """Slice one page of fake search results out of the ALL_TITLES fixture.

    ``page_number`` is zero-based; the slice is clamped naturally by Python
    list slicing when it runs past the end.
    """
    total = len(ALL_TITLES)
    first = page_number * SEARCH_RESULTS_PER_PAGE
    last = first + SEARCH_RESULTS_PER_PAGE
    return {
        'number_pages': math.ceil(total / SEARCH_RESULTS_PER_PAGE),
        'number_results': total,
        'page_number': page_number,
        'titles': ALL_TITLES[first:last],
    }
def get_titles_by_postcode(postcode, page_number):
    """Fake postcode search: the postcode is ignored, a canned page returned."""
    return _get_titles(page_number)
def get_titles_by_address(address, page_number):
    """Fake address search: the address is ignored, a canned page returned."""
    return _get_titles(page_number)
def get_official_copy_data(title_number):
    """Return the canned official-copy result regardless of title_number."""
    return OFFICIAL_COPY_RESULT
|
# Sphinx configuration for the grmpy documentation.
import os
import sys
# Read the Docs sets this env var in its build environment.
on_rtd = os.environ.get("READTHEDOCS") == "True"
sys.path.insert(0, os.path.abspath(".."))
extensions = [
    "sphinx.ext.autodoc",
    "sphinx.ext.doctest",
    "sphinx.ext.todo",
    "sphinx.ext.coverage",
    "sphinx.ext.mathjax",
    "sphinx.ext.ifconfig",
    "sphinx.ext.viewcode",
    "sphinxcontrib.bibtex",
    "sphinx.ext.imgconverter",
]
bibtex_bibfiles = ["source/refs.bib"]
templates_path = ["_templates"]
source_suffix = ".rst"
master_doc = "index"
# Project metadata.
# NOTE(review): Sphinx reads the option named `copyright`; this file defines
# `copyright_` (presumably to avoid shadowing the builtin) and only feeds it
# to epub_copyright below -- confirm the HTML footer copyright is intended
# to be absent.
project = "grmpy"
copyright_ = "2018, grmpy-dev team"
author = "grmpy-dev team"
version = "1.0"
release = "1.0"
language = None
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
pygments_style = "sphinx"
# Hide TODO boxes on the hosted (RTD) build, show them locally.
if on_rtd:
    todo_include_todos = False
else:
    todo_include_todos = True
html_theme = "sphinx_rtd_theme"
html_static_path = []
htmlhelp_basename = "grmpydoc"
latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    #
    "pointsize": "12pt",
    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',
    # Latex figure (float) alignment
    #
    "figure_align": "htbp",
}
latex_documents = [
    (master_doc, "grmpy.tex", "grmpy Documentation", "Development Team", "manual")
]
man_pages = [(master_doc, "grmpy", "grmpy Documentation", [author], 1)]
texinfo_documents = [
    (
        master_doc,
        "grmpy",
        "grmpy Documentation",
        author,
        "grmpy",
        "One line description of project.",
        "Miscellaneous",
    )
]
# EPUB output options.
epub_title = project
epub_author = author
epub_publisher = author
epub_copyright = copyright_
epub_exclude_files = ["search.html"]
|
import datetime
import re
import sys
from collections import deque
from decimal import Decimal
from enum import Enum
from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network
from pathlib import Path
from types import GeneratorType
from typing import Any, Callable, Dict, Type, Union
from uuid import UUID
# re.Pattern is only public as of Python 3.7; on 3.6 recover the class from a
# compiled pattern instance so it can be used as an ENCODERS_BY_TYPE key.
if sys.version_info >= (3, 7):
    Pattern = re.Pattern
else:
    # python 3.6
    Pattern = re.compile('a').__class__
from .color import Color
from .types import SecretBytes, SecretStr
# Public API of this module (a tuple of names works the same as a list here).
__all__ = 'pydantic_encoder', 'custom_pydantic_encoder', 'timedelta_isoformat'
def isoformat(o: Union[datetime.date, datetime.time]) -> str:
    """Serialize a date, datetime or time to its ISO 8601 string form."""
    return o.isoformat()
def decimal_encoder(dec_value: Decimal) -> Union[int, float]:
    """
    Encode a Decimal as an int when it carries no fractional digits,
    otherwise as a float.

    This is useful when we use ConstrainedDecimal to represent Numeric(x,0)
    where an integer (but not int typed) is used. Encoding this as a float
    results in failed round-tripping between encode and parse.
    Our Id type is a prime example of this.

    >>> decimal_encoder(Decimal("1.0"))
    1.0

    >>> decimal_encoder(Decimal("1"))
    1
    """
    # A non-negative exponent means the value has no digits after the point.
    exponent = dec_value.as_tuple().exponent
    return int(dec_value) if exponent >= 0 else float(dec_value)
# Default JSON encoder per concrete type. pydantic_encoder walks an object's
# MRO against this table, so subclasses inherit their base class's encoder.
ENCODERS_BY_TYPE: Dict[Type[Any], Callable[[Any], Any]] = {
    bytes: lambda o: o.decode(),
    Color: str,
    datetime.date: isoformat,
    datetime.datetime: isoformat,
    datetime.time: isoformat,
    # Timedeltas become seconds as a float (see timedelta_isoformat for the
    # ISO 8601 alternative).
    datetime.timedelta: lambda td: td.total_seconds(),
    Decimal: decimal_encoder,
    Enum: lambda o: o.value,
    frozenset: list,
    deque: list,
    GeneratorType: list,
    IPv4Address: str,
    IPv4Interface: str,
    IPv4Network: str,
    IPv6Address: str,
    IPv6Interface: str,
    IPv6Network: str,
    Path: str,
    Pattern: lambda o: o.pattern,
    SecretBytes: str,
    SecretStr: str,
    set: list,
    UUID: str,
}
def pydantic_encoder(obj: Any) -> Any:
    """Default ``json.dumps`` fallback for pydantic models, dataclasses and
    the types registered in ENCODERS_BY_TYPE.

    :raises TypeError: when no encoder is known for the object's type.
    """
    from dataclasses import asdict, is_dataclass
    from .main import BaseModel

    if isinstance(obj, BaseModel):
        return obj.dict()
    if is_dataclass(obj):
        return asdict(obj)

    # Walk the class and its bases (dropping the trailing `object`) for a
    # registered encoder.
    for klass in obj.__class__.__mro__[:-1]:
        encoder = ENCODERS_BY_TYPE.get(klass)
        if encoder is not None:
            return encoder(obj)

    raise TypeError(f"Object of type '{obj.__class__.__name__}' is not JSON serializable")
def custom_pydantic_encoder(type_encoders: Dict[Any, Callable[[Type[Any]], Any]], obj: Any) -> Any:
    """Encode ``obj`` with a caller-supplied ``type_encoders`` mapping,
    falling back to :func:`pydantic_encoder` when no class in the object's
    MRO has a custom encoder registered."""
    # Walk the class and its bases (dropping the trailing `object`).
    for klass in obj.__class__.__mro__[:-1]:
        encoder = type_encoders.get(klass)
        if encoder is not None:
            return encoder(obj)
    return pydantic_encoder(obj)
def timedelta_isoformat(td: datetime.timedelta) -> str:
    """
    ISO 8601 encoding for timedeltas.

    Relies on timedelta's normalization: ``seconds`` is always in
    [0, 86400) and ``microseconds`` in [0, 1000000).
    """
    hours, remainder = divmod(td.seconds, 3600)
    minutes, seconds = divmod(remainder, 60)
    return f'P{td.days}DT{hours:d}H{minutes:d}M{seconds:d}.{td.microseconds:06d}S'
|
import ROOT
from math import pi, sqrt, pow, exp
import scipy.integrate
import numpy
from array import array
# Fine-structure constant (dimensionless); defined but not read in this view.
alpha = 7.2973e-3
# Electron rest mass, MeV.
m_e = 0.51099892
# Atomic number of xenon (the parent nucleus).
Z_Xe = 54
# Q-value of the Xe-136 double-beta decay, MeV.
Q = 2.4578
def F(Z, KE):
    """Fermi function: Coulomb correction factor for the beta spectrum.

    :param Z: atomic number of the parent nucleus (coefficients use Z + 2,
              the daughter charge seen by the emitted electrons)
    :param KE: electron kinetic energy in MeV
    :return: multiplicative correction (1 when the momentum is non-physical)
    """
    # Total energy and reduced total energy W = E/m_e (dimensionless).
    E = KE + m_e
    W = E/m_e
    Z0 = Z + 2
    # Clamp W just above 1 to avoid sqrt of a negative below.
    if W <= 1:
        W = 1 + 1e-4
    # NOTE(review): a/b look like an empirical two-branch parameterization of
    # the Fermi function in Z0 -- source of the coefficients not shown here;
    # verify against the original reference before touching them.
    if W > 2.2:
        a = -8.46e-2 + 2.48e-2*Z0 + 2.37e-4*Z0**2
        b = 1.15e-2 + 3.58e-4*Z0 - 6.17e-5*Z0**2
    else:
        a = -0.811 + 4.46e-2*Z0 + 1.08e-4*Z0**2
        b = 0.673 - 1.82e-2*Z0 + 6.38e-5*Z0**2
    x = sqrt(W-1)
    # Reduced momentum p = sqrt(W^2 - 1).
    p = sqrt(W**2 - 1)
    if (p <= 0):
        result = 1
    else:
        result = W/p*exp(a + b*x)
    return result
def D(delta, K, i):
    """Integrand of the two-electron summed-energy spectrum.

    All energies are in units of m_e. ``delta`` is the integration variable
    (energy difference between the two electrons), ``K`` the summed kinetic
    energy, and ``i`` the spectral index weighting the phase space by
    (T0 - K)**i.
    """
    Z = Z_Xe
    T0 = Q/m_e
    # Total energies: the two electrons share K, split by +/- delta.
    E1 = 0.5*(K + delta) + 1
    # was 0.5*(K+D)+1: both electrons were given the same energy, which made
    # the integration over delta meaningless.
    E2 = 0.5*(K - delta) + 1
    p1 = sqrt(E1**2 - 1)
    p2 = sqrt(E2**2 - 1)
    T1 = E1 - 1
    T2 = E2 - 1
    # was F(Z, T1*m_e) in both factors: the second Fermi factor must use the
    # second electron's kinetic energy T2.
    return p1*E1*F(Z, T1*m_e)*p2*E2*F(Z, T2*m_e)*pow(T0 - K, i)
def SumSpectrum(K, i):
    """Un-normalized summed-energy spectrum at kinetic energy K (MeV).

    Integrates the two-electron integrand D over the allowed energy split
    and clamps negative numerical artifacts to zero.
    """
    # Outside the physical window [0, Q] the spectrum vanishes.
    if K < 0 or K > Q:
        return 0
    limit = K/m_e
    value = scipy.integrate.quad(D, -limit, limit, (limit, i))[0]
    return 0 if value < 0 else value
def gauss_conv(x, y, res):
    """Smear ``y`` (sampled on the grid ``x``) with a Gaussian kernel.

    The kernel width is ``res`` times the mean of ``x``, evaluated on the
    same grid, and the convolution keeps the input length ('same' mode).
    """
    mu = numpy.mean(x)
    sigma = res * mu
    amplitude = 1.0 / (sigma * sqrt(2 * pi))
    kernel = [amplitude * exp(-0.5 * ((xi - mu) / sigma) ** 2) for xi in x]
    return numpy.convolve(y, kernel, 'same')
def normalize(y, eps, f):
    """Scale every sample of ``y`` by ``f``.

    ``eps`` is unused; it is kept so existing call sites (which pass the bin
    width) keep working unchanged.
    """
    return [sample * f for sample in y]
# ---- Build and plot the 0vbb / 2vbb summed-energy spectra (ROOT, Python 2:
# ---- note the xrange/raw_input calls at the bottom).
# Energy grid: N points over [min_E, max_E] in units of E/Q.
N = 1000
min_E = 0.0
max_E = 1.2
E_scaled = array('d', numpy.linspace(min_E, max_E, N, False))
Es = array('d', (E*Q for E in E_scaled))
eps = (max_E - min_E)/N
# 0vbb: a delta-like spike at E = Q; 2vbb: continuous spectrum with index 5.
bb0n = [0.5/eps if abs(E-Q)<eps else 0 for E in Es]
bb2n = [SumSpectrum(E, 5) for E in Es]
# Apply 2% energy resolution and normalize the areas.
bb0n_smeared = gauss_conv(Es, bb0n, 0.02)
bb2n_smeared = gauss_conv(Es, bb2n, 0.02)
bb0n_int = scipy.integrate.simps(bb0n_smeared, None, eps)
bb0n_norm = array('d', normalize(bb0n_smeared, eps, 1e-2/bb0n_int))
bb2n_int = scipy.integrate.simps(bb2n_smeared, None, eps)
bb2n_norm = array('d', normalize(bb2n_smeared, eps, 1/bb2n_int))
g_bb0n = ROOT.TGraph(N, E_scaled, bb0n_norm)
g_bb0n.SetTitle("")
g_bb0n.SetLineStyle(ROOT.kDashed)
g_bb2n = ROOT.TGraph(N, E_scaled, bb2n_norm)
g_bb2n.SetTitle("")
# Majoron-emitting modes: spectra for spectral indices n = 1, 2, 3, 5, 7,
# preceded by the 0vbb spike.
bb0nX = []
bb0nX.append([0.5/eps if abs(E-Q)<eps else 0 for E in Es])
for i in [1, 2, 3, 5, 7]:
    bb0nX.append([SumSpectrum(E, i) for E in Es])
bb0nX_graphs = []
for bb0nXn in bb0nX:
    bb0nX_int = scipy.integrate.simps(bb0nXn, None, eps)
    bb0nX_norm = array('d', normalize(bb0nXn, eps, 1/bb0nX_int))
    g_bb0nX = ROOT.TGraph(N, E_scaled, bb0nX_norm)
    bb0nX_graphs.append(g_bb0nX)
# Zoomed-in grid around the endpoint (E/Q in [0.9, 1.1]) for the inset.
min_E = 0.9
max_E = 1.1
E_scaled_z = array('d', numpy.linspace(min_E, max_E, N, False))
Es_z = array('d', (E*Q for E in E_scaled_z))
eps_z = (max_E - min_E)/N
bb0n_z = [0.5/eps_z if abs(E-Q)<eps_z else 0 for E in Es_z]
bb2n_z = [SumSpectrum(E, 5) for E in Es_z]
bb0n_smeared_z = gauss_conv(Es_z, bb0n_z, 0.02)
bb2n_smeared_z = gauss_conv(Es_z, bb2n_z, 0.02)
# NOTE(review): these use `eps` and the *full-range* integrals bb0n_int /
# bb2n_int rather than eps_z -- presumably intentional so the zoomed curves
# share the full-range normalization, but verify.
bb0n_norm_z = array('d', normalize(bb0n_smeared_z, eps, 1e-6/bb0n_int))
bb2n_norm_z = array('d', normalize(bb2n_smeared_z, eps, 1.0/bb2n_int))
g_bb0n_z = ROOT.TGraph(N, E_scaled_z, bb0n_norm_z)
g_bb0n_z.SetTitle("")
g_bb0n_z.SetLineStyle(ROOT.kDashed)
g_bb2n_z = ROOT.TGraph(N, E_scaled_z, bb2n_norm_z)
g_bb2n_z.SetTitle("")
# Canvas 1: full spectrum with a zoomed inset pad in the upper right.
c_both = ROOT.TCanvas("c_both","c_both")
p = ROOT.TPad("p", "p", 0, 0, 1, 1)
p.SetRightMargin(0.02)
p.SetTopMargin(0.02)
p.Draw()
p.cd()
g_bb2n.Draw("AL")
g_bb0n.Draw("L")
g_bb2n.GetYaxis().SetTitle("dN/dE")
g_bb2n.GetXaxis().SetTitle("Sum e^{-} Energy (E/Q)")
c_both.cd()
p_inset = ROOT.TPad("p_inset","p_inset",0.5, 0.5, 0.995, 0.995)
p_inset.SetRightMargin(0.05)
p_inset.SetTopMargin(0.05)
p_inset.Draw()
p_inset.cd()
g_bb2n_z.Draw("AL")
g_bb0n_z.Draw("L")
g_bb2n_z.GetYaxis().SetTitle("dN/dE")
g_bb2n_z.GetXaxis().SetTitle("Sum e^{-} Energy (E/Q)")
g_bb2n_z.GetYaxis().SetNoExponent(False)
g_bb2n_z.GetXaxis().SetRangeUser(1-0.25*(1-min_E), 1+0.25*(max_E-1))
g_bb2n_z.GetYaxis().SetRangeUser(0, 0.0004)
# Canvas 2: zoomed endpoint region alone.
c_z = ROOT.TCanvas("c_z","c_z")
c_z.SetRightMargin(0.05)
c_z.SetTopMargin(0.05)
g_bb2n_z.Draw("AL")
g_bb0n_z.Draw("L")
# Canvas 3: full range alone.
c = ROOT.TCanvas("c","c")
c.SetRightMargin(0.05)
c.SetTopMargin(0.05)
g_bb2n.Draw("AL")
g_bb0n.Draw("L")
# Canvas 4: all majoron-mode spectra overlaid, drawn back-to-front so the
# first graph ends up on top and owns the axes.
c_majoron = ROOT.TCanvas("c_majoron")
c_majoron.SetRightMargin(0.05)
c_majoron.SetTopMargin(0.05)
colors = [ROOT.kBlack, ROOT.kRed, ROOT.kGreen, ROOT.kBlue,
          ROOT.kMagenta, ROOT.kCyan]
draw_opt = "AL"
for i in xrange(len(bb0nX_graphs)):
    bb0nX_graphs[-(i+1)].SetLineColor(colors[-(i+1)])
    bb0nX_graphs[-(i+1)].Draw(draw_opt)
    draw_opt = "L"
bb0nX_graphs[-1].SetTitle("")
bb0nX_graphs[-1].GetXaxis().SetRangeUser(0, 1.1)
bb0nX_graphs[-1].GetXaxis().SetTitle("Sum e^{-} Energy (E/Q)")
bb0nX_graphs[-1].GetYaxis().SetTitle("dN/dE")
l_majoron = ROOT.TLegend(0.45, 0.77, 0.85, 0.94)
l_majoron.SetFillColor(ROOT.kWhite)
l_majoron.SetNColumns(2)
l_majoron.AddEntry(bb0nX_graphs[0], "0#nu#beta#beta", "l")
l_majoron.AddEntry(bb0nX_graphs[1], "0#nu#beta#beta#chi^{0} (n=1)", "l")
l_majoron.AddEntry(bb0nX_graphs[4], "2#nu#beta#beta (n=5)", "l")
l_majoron.AddEntry(bb0nX_graphs[2], "0#nu#beta#beta#chi^{0} (n=2)", "l")
l_majoron.AddEntry(None, "", "")
l_majoron.AddEntry(bb0nX_graphs[3], "0#nu#beta#beta#chi^{0}(#chi^{0}) (n=3)", "l")
l_majoron.AddEntry(None, "", "")
l_majoron.AddEntry(bb0nX_graphs[5], "0#nu#beta#beta#chi^{0}#chi^{0} (n=7)", "l")
l_majoron.Draw()
# Keep the canvases alive until the user presses Enter (Python 2 raw_input).
dummy = raw_input("Press Enter...")
|
class SubscriptionTracking(object):
    """SendGrid-style ``subscription_tracking`` mail setting.

    All four attributes are optional; :meth:`get` serializes only the ones
    that have been populated.
    """

    def __init__(self, enable=None, text=None, html=None, substitution_tag=None):
        """Create a SubscriptionTracking; every argument may be omitted."""
        self._enable = None
        self._text = None
        self._html = None
        self._substitution_tag = None
        # Route supplied arguments through the property setters, leaving the
        # omitted ones as None.
        if enable is not None:
            self.enable = enable
        if text is not None:
            self.text = text
        if html is not None:
            self.html = html
        if substitution_tag is not None:
            self.substitution_tag = substitution_tag

    @property
    def enable(self):
        """Whether subscription tracking is enabled."""
        return self._enable

    @enable.setter
    def enable(self, value):
        self._enable = value

    @property
    def text(self):
        """Plain-text content of the unsubscribe footer."""
        return self._text

    @text.setter
    def text(self, value):
        self._text = value

    @property
    def html(self):
        """HTML content of the unsubscribe footer."""
        return self._html

    @html.setter
    def html(self, value):
        self._html = value

    @property
    def substitution_tag(self):
        """Tag in the body that gets replaced by the unsubscribe link."""
        return self._substitution_tag

    @substitution_tag.setter
    def substitution_tag(self, value):
        self._substitution_tag = value

    def get(self):
        """Return a JSON-ready dict containing only the populated fields."""
        payload = {}
        for key in ("enable", "text", "html", "substitution_tag"):
            value = getattr(self, key)
            if value is not None:
                payload[key] = value
        return payload
|
import novaclient
from novaclient.exceptions import NotFound
import novaclient.client
from keystoneauth1 import loading
from keystoneauth1 import session
import neutronclient.v2_0.client
import cinderclient.v2.client
from osc_lib.utils import wait_for_delete
import taskflow.engines
from taskflow.patterns import linear_flow as lf
from taskflow.patterns import graph_flow as gf
from taskflow import task
import logging
import os
import json
import time
NOVACLIENT_VERSION = "2.37"
def get_openstack_nova_client(config):
    """Return only the nova client from get_openstack_clients()."""
    return get_openstack_clients(config)[0]
def get_openstack_neutron_client(config):
    """Return only the neutron client from get_openstack_clients()."""
    return get_openstack_clients(config)[1]
def get_openstack_cinder_client(config):
    """Return only the cinder client from get_openstack_clients()."""
    return get_openstack_clients(config)[2]
def get_openstack_clients(config):
    """ gets a tuple of various openstack clients.
    (novaclient, neutronclient, cinderclient).
    Caller can pick up one or all of the returned clients.
    """
    # Credential resolution order: M2M credential store file referenced by
    # the config, then the config mapping itself, then environment variables.
    if config:
        if config.get('M2M_CREDENTIAL_STORE'):
            logging.debug("loading credentials from %s" % config.get('M2M_CREDENTIAL_STORE'))
            m2m_config = json.load(open(config.get('M2M_CREDENTIAL_STORE')))
            source_config = m2m_config
        else:
            logging.debug("using config as provided")
            source_config = config
    else:
        logging.debug("no config, trying environment vars")
        source_config = os.environ
    os_username = source_config['OS_USERNAME']
    os_password = source_config['OS_PASSWORD']
    os_tenant_name = source_config['OS_TENANT_NAME']
    os_auth_url = source_config['OS_AUTH_URL']
    loader = loading.get_plugin_loader('password')
    auth = loader.load_from_options(auth_url=os_auth_url,
                                    username=os_username,
                                    password=os_password,
                                    project_name=os_tenant_name
                                    )
    # NOTE(review): verify=False disables TLS certificate verification for
    # every API call made through this session -- confirm this is intended.
    sess = session.Session(auth=auth, verify=False)
    # NOTE(review): the cinder v2 client receives NOVACLIENT_VERSION ("2.37")
    # as its first positional argument -- verify this is the value
    # cinderclient expects there.
    return (novaclient.client.Client(NOVACLIENT_VERSION,
                                     session=sess),
            neutronclient.v2_0.client.Client(session=sess),
            cinderclient.v2.client.Client(NOVACLIENT_VERSION, session=sess)
            )
def _format_nics(nics):
""" Create a networks data structure for python-novaclient.
**Note** "auto" is the safest default to pass to novaclient
:param nics: either None, one of strings "auto" or "none"or string with a
comma-separated list of nic IDs from OpenStack.
:return: A data structure that can be passed as Nics
"""
if not nics:
return "auto"
if nics == "none":
return "none"
if nics.lower() == "auto":
return "auto"
return [{"net-id": item, "v4-fixed-ip": ""}
for item in nics.strip().split(",")]
class GetServer(task.Task):
    """Taskflow task: look up a nova server by its id."""
    def execute(self, server_id, config):
        logging.debug("getting server %s" % server_id)
        client = get_openstack_nova_client(config)
        return client.servers.get(server_id)
class GetImage(task.Task):
    """Taskflow task: resolve a glance image by name."""
    def execute(self, image_name, config):
        logging.debug("getting image %s" % image_name)
        client = get_openstack_nova_client(config)
        return client.glance.find_image(image_name)

    def revert(self, *args, **kwargs):
        # Read-only lookup: nothing to undo.
        pass
class ListImages(task.Task):
    """Taskflow task: list all glance images.

    ``image_name`` is unused; kept so the taskflow wiring stays unchanged.
    """
    def execute(self, image_name, config):
        logging.debug("getting images")
        client = get_openstack_nova_client(config)
        return client.glance.list()

    def revert(self, *args, **kwargs):
        # Read-only lookup: nothing to undo.
        pass
class GetFlavor(task.Task):
    """Taskflow task: resolve a nova flavor by name."""
    def execute(self, flavor_name, config):
        logging.debug("getting flavor %s" % flavor_name)
        client = get_openstack_nova_client(config)
        return client.flavors.find(name=flavor_name)

    def revert(self, *args, **kwargs):
        # Read-only lookup: nothing to undo.
        pass
class ListFlavors(task.Task):
    """Taskflow task: list all nova flavors.

    ``flavor_name`` is unused; kept so the taskflow wiring stays unchanged.
    """
    def execute(self, flavor_name, config):
        logging.debug("getting flavors")
        client = get_openstack_nova_client(config)
        return client.flavors.list()

    def revert(self, *args, **kwargs):
        # Read-only lookup: nothing to undo.
        pass
class CreateSecurityGroup(task.Task):
    """Taskflow task: create a per-instance security group (neutron).

    When ``master_sg_name`` is given, tcp/udp/icmp ingress from that master
    group is allowed into the new group.
    """
    # note this uses neutron client
    # Class-level default so revert() can run even if execute() failed before
    # assigning the id.
    secgroup_id = ""
    def execute(self, display_name, master_sg_name, config):
        logging.debug("create security group %s" % display_name)
        security_group_name = display_name
        nc = get_openstack_neutron_client(config)
        self.secgroup = nc.create_security_group({"security_group": {
            "name": security_group_name,
            "description": "Security group generated by Pebbles"
        }})
        self.secgroup_id = self.secgroup["security_group"]["id"]
        self.secgroup_name = self.secgroup["security_group"]["name"]
        if master_sg_name:
            master_sg = nc.find_resource("security_group", master_sg_name)
            # NOTE(review): the neutron API documents ethertype values as
            # "IPv4"/"IPv6"; verify lowercase 'ipv4' is accepted here.
            nc.create_security_group_rule({"security_group_rule": dict(
                security_group_id=self.secgroup_id,
                protocol='tcp',
                ethertype='ipv4',
                port_range_min=1,
                direction='ingress',
                port_range_max=65535,
                remote_group_id=master_sg["id"]
            )})
            nc.create_security_group_rule({"security_group_rule": dict(
                security_group_id=self.secgroup_id,
                protocol='udp',
                ethertype='ipv4',
                port_range_min=1,
                direction='ingress',
                port_range_max=65535,
                remote_group_id=master_sg["id"]
            )})
            # NOTE(review): for icmp the port range fields map to ICMP
            # type/code, not ports -- confirm 1..255 is the intended filter.
            nc.create_security_group_rule({"security_group_rule": dict(
                security_group_id=self.secgroup_id,
                protocol='icmp',
                ethertype='ipv4',
                port_range_min=1,
                direction='ingress',
                port_range_max=255,
                remote_group_id=master_sg["id"]
            )})
        logging.info("Created security group %s" % self.secgroup_id)
        return self.secgroup_id
    def revert(self, config, **kwargs):
        # Best-effort cleanup of the group created above.
        logging.debug("revert: delete security group")
        nc = get_openstack_neutron_client(config)
        nc.delete_security_group(self.secgroup_id)
class CreateRootVolume(task.Task):
    """Taskflow task: create a bootable root volume from an image.

    Returns the new volume id, or "" when no root volume was requested.
    Polls cinder until the volume is 'available' (up to ~150 s).
    """
    def execute(self, display_name, image, root_volume_size, config):
        if not root_volume_size:
            logging.debug("no root volume defined")
            return ""
        logging.debug("creating a root volume for instance %s from image %s" % (display_name, image))
        nc = get_openstack_cinder_client(config)
        volume_name = '%s-root' % display_name
        volume = nc.volumes.create(
            size=root_volume_size,
            imageRef=image.id,
            name=volume_name
        )
        # Remember the id for revert() before we start waiting.
        self.volume_id = volume.id
        retries = 0
        while nc.volumes.get(volume.id).status not in ('available',):
            logging.debug("...waiting for volume to be ready")
            time.sleep(5)
            retries += 1
            if retries > 30:
                # was: RuntimeError('Volume creation %s is stuck') -- the
                # %s placeholder was never substituted.
                raise RuntimeError('Volume creation %s is stuck' % volume.id)
        return volume.id

    def revert(self, config, **kwargs):
        """Best-effort deletion of the volume created by execute()."""
        logging.debug("revert: delete root volume")
        try:
            if getattr(self, 'volume_id', None):
                nc = get_openstack_cinder_client(config)
                nc.volumes.delete(
                    nc.volumes.get(self.volume_id))
            else:
                logging.debug("revert: no volume_id stored, unable to revert")
        except Exception as e:
            logging.error('revert: deleting volume failed: %s' % e)
class CreateDataVolume(task.Task):
    """Taskflow task: create an empty data volume for an instance.

    Returns the new volume id, or None when no data volume was requested.
    Polls cinder until the volume is 'available' (up to ~150 s).
    """
    def execute(self, display_name, data_volume_size, data_volume_type, config):
        if not data_volume_size:
            # was: "no root volume defined" -- copy-paste from the root
            # volume task.
            logging.debug("no data volume defined")
            return None
        logging.debug("creating a data volume for instance %s, %d" % (display_name, data_volume_size))
        nc = get_openstack_cinder_client(config)
        volume_name = '%s-data' % display_name
        volume = nc.volumes.create(
            size=data_volume_size,
            name=volume_name,
            volume_type=data_volume_type,
        )
        # Remember the id for revert() before we start waiting.
        self.volume_id = volume.id
        retries = 0
        while nc.volumes.get(volume.id).status not in ('available',):
            logging.debug("...waiting for volume to be ready")
            time.sleep(5)
            retries += 1
            if retries > 30:
                # was: RuntimeError('Volume creation %s is stuck') -- the
                # %s placeholder was never substituted.
                raise RuntimeError('Volume creation %s is stuck' % volume.id)
        return volume.id

    def revert(self, config, **kwargs):
        """Best-effort deletion of the volume created by execute()."""
        # was: "revert: delete root volume" -- copy-paste from the root
        # volume task.
        logging.debug("revert: delete data volume")
        try:
            if getattr(self, 'volume_id', None):
                nc = get_openstack_cinder_client(config)
                nc.volumes.delete(
                    nc.volumes.get(self.volume_id))
            else:
                logging.debug("revert: no volume_id stored, unable to revert")
        except Exception as e:
            logging.error('revert: deleting volume failed: %s' % e)
class ProvisionInstance(task.Task):
    """Taskflow task: boot a Nova server, optionally from a pre-created
    root volume. execute() returns the new server id."""
    def execute(self, display_name, image, flavor, security_group, extra_sec_groups,
                root_volume_id, nics, userdata, config):
        logging.debug("provisioning instance %s" % display_name)
        nc = get_openstack_nova_client(config)
        sgs = [security_group]
        if extra_sec_groups:
            sgs.extend(extra_sec_groups)
        try:
            if len(root_volume_id):
                # Boot from volume: map the root volume to vda with the
                # delete-on-terminate flag set ('...:::1').
                bdm = {'vda': '%s:::1' % (root_volume_id)}
            else:
                bdm = None
            instance = nc.servers.create(
                display_name,
                image.id,
                flavor.id,
                key_name=display_name,
                security_groups=sgs,
                block_device_mapping=bdm,
                nics=_format_nics(nics),
                userdata=userdata,)
        except Exception as e:
            logging.error("error provisioning instance: %s" % e)
            # BUG FIX: re-raise with bare 'raise' so the original
            # traceback is preserved ('raise e' rewrites it).
            raise
        # Remember the id so revert() can clean up on flow failure.
        self.instance_id = instance.id
        logging.debug("instance provisioning successful")
        return instance.id
    def revert(self, config, **kwargs):
        """Delete the server created by execute(), if any."""
        logging.debug("revert: deleting instance %s", kwargs)
        try:
            if getattr(self, 'instance_id', None):
                nc = get_openstack_nova_client(config)
                nc.servers.delete(self.instance_id)
            else:
                logging.debug("revert: no instance_id stored, unable to revert")
        except Exception as e:
            logging.error('revert: deleting instance failed: %s' % e)
class DeprovisionInstance(task.Task):
    """Taskflow task: detach security groups from a server, delete it and
    wait for the deletion to complete. Returns the server name (or None
    if the server was already gone)."""
    def execute(self, server_id, config):
        logging.debug("deprovisioning instance %s" % server_id)
        nc = get_openstack_nova_client(config)
        try:
            server = nc.servers.get(server_id)
        except NotFound:
            logging.warn("Server %s not found" % server_id)
            return
        if hasattr(server, "security_groups"):
            for sg in server.security_groups:
                try:
                    server.remove_security_group(sg['name'])
                # BUG FIX: narrowed from a bare 'except:', which also
                # swallowed KeyboardInterrupt/SystemExit.
                except Exception:
                    logging.warn("Unable to remove security group from server (%s)" % sg)
        else:
            logging.warn("no security groups on server!")
        try:
            nc.servers.delete(server_id)
            wait_for_delete(nc.servers, server_id)
        except Exception as e:
            logging.warn("Unable to deprovision server %s" % e)
        return server.name
    def revert(self, *args, **kwargs):
        # Deletion is best-effort; nothing sensible to undo.
        logging.debug("revert: deprovisioning instance failed")
class AllocateIPForInstance(task.Task):
    """Taskflow task: wait for the server to finish building, then
    optionally attach a floating IP (reusing a free one, or allocating a
    new one from the network named 'public').

    Returns a dict with 'public_ip', 'allocated_from_pool', 'private_ip'.

    Original author's note: "user beware, i have not done comprehensive
    testing on this but the parts of the refactoring should be correct"
    (suvileht, 2017-08-24).
    """
    def execute(self, server_id, allocate_public_ip, config):
        logging.info("Allocate IP for server %s" % server_id)
        novaclient = get_openstack_nova_client(config)
        neutronclient = get_openstack_neutron_client(config)
        retries = 0
        # BUG FIX: the status check used 'is "BUILDING"' -- string
        # identity, which is not a reliable equality test -- now '=='.
        while novaclient.servers.get(server_id).status == "BUILDING" or not novaclient.servers.get(server_id).networks:
            logging.debug("...waiting for server to be ready")
            time.sleep(5)
            retries += 1
            if retries > 30:
                raise RuntimeError('Server %s is stuck in building' % server_id)
        server = novaclient.servers.get(server_id)
        if allocate_public_ip:
            ips = neutronclient.list_floatingips()
            allocated_from_pool = False
            # Any floating IP that is not ACTIVE is considered reusable.
            free_ips = [ip for ip in ips["floatingips"] if ip["status"] != "ACTIVE"]
            if not free_ips:
                logging.debug("No allocated free IPs left, trying to allocate one")
                try:
                    # for backwards compatibility reasons we assume the
                    # network is called "public"
                    network_id = neutronclient.find_resource("network",
                                                             "public")
                    # NOTE(review): create_floatingip returns a response
                    # dict, not a bare address -- downstream use may need
                    # ip["floatingip"]["floating_ip_address"]. TODO confirm.
                    ip = neutronclient.create_floatingip({
                        "floating_network_id": network_id})
                    allocated_from_pool = True
                except neutronclient.exceptions.ClientException as e:
                    # NOTE(review): 'exceptions' is resolved on the client
                    # instance here -- verify this attribute exists on the
                    # client object, otherwise this line itself raises.
                    logging.warning("Cannot allocate IP, quota exceeded?")
                    raise e
            else:
                ip = free_ips[0]["floating_ip_address"]
            logging.info("IP assigned IS %s" % ip)
            try:
                server.add_floating_ip(ip)
            except Exception as e:
                logging.error(e)
            address_data = {
                'public_ip': ip,
                'allocated_from_pool': allocated_from_pool,
                'private_ip': list(server.networks.values())[0][0],
            }
        else:
            address_data = {
                'public_ip': None,
                'allocated_from_pool': False,
                'private_ip': list(server.networks.values())[0][0],
            }
        return address_data
    def revert(self, *args, **kwargs):
        pass
class ListInstanceVolumes(task.Task):
    """Taskflow task: return the volumes currently attached to a server."""
    def execute(self, server_id, config):
        nc = get_openstack_nova_client(config)
        return nc.volumes.get_server_volumes(server_id)
    def revert(self, *args, **kwargs):
        # BUG FIX: the original signature was revert(self) with no
        # parameters; taskflow passes arguments to revert, so any revert
        # would have raised TypeError instead of reverting cleanly.
        pass
class AttachDataVolume(task.Task):
    """Taskflow task: attach the data volume (if one was created) to the
    server as /dev/vdc, once the server is out of the build state."""
    def execute(self, server_id, data_volume_id, config):
        logging.debug("Attach data volume for server %s" % server_id)
        if data_volume_id:
            nc = get_openstack_nova_client(config)
            retries = 0
            # BUG FIX: the status check used 'is "BUILDING"' -- string
            # identity, which is not a reliable equality test -- now '=='.
            while nc.servers.get(server_id).status == "BUILDING" or not nc.servers.get(server_id).networks:
                logging.debug("...waiting for server to be ready")
                time.sleep(5)
                retries += 1
                if retries > 30:
                    raise RuntimeError('Server %s is stuck in building' % server_id)
            nc.volumes.create_server_volume(server_id, data_volume_id, '/dev/vdc')
    def revert(self, *args, **kwargs):
        pass
class AddUserPublicKey(task.Task):
    """Register the user's SSH public key as a Nova keypair named after
    the instance; revert removes the keypair again if it was created."""
    def execute(self, display_name, public_key, config):
        logging.debug("adding user public key")
        nova = get_openstack_nova_client(config)
        # Flag is set False *before* the create call so revert() only
        # deletes a keypair that was actually registered.
        self.keypair_added = False
        nova.keypairs.create(display_name, public_key)
        self.keypair_added = True
    def revert(self, display_name, public_key, config, **kwargs):
        logging.debug("revert: remove user public key")
        if not getattr(self, 'keypair_added', None):
            return
        nova = get_openstack_nova_client(config)
        nova.keypairs.find(name=display_name).delete()
class RemoveUserPublicKey(task.Task):
    """Taskflow task: best-effort removal of the user's Nova keypair;
    a missing keypair is silently ignored."""
    def execute(self, display_name, config):
        logging.debug("removing user public key")
        nc = get_openstack_nova_client(config)
        try:
            nc.keypairs.find(name=display_name).delete()
        # BUG FIX: narrowed from a bare 'except:' (which also swallowed
        # KeyboardInterrupt/SystemExit); deletion stays best-effort.
        except Exception:
            pass
    def revert(self, *args, **kwargs):
        pass
class DeleteSecurityGroup(task.Task):
    """Delete the per-instance security group (named after the server)."""
    def execute(self, server, config):
        logging.debug("delete security group")
        neutron = get_openstack_neutron_client(config)
        # Lookup errors propagate; only the delete itself is best-effort.
        group = neutron.find_resource("security_group",
                                      server.name)
        try:
            if group:
                neutron.delete_security_group(group["id"])
        except Exception as exc:
            logging.warn("Could not delete security group: %s" % exc)
    def revert(self, *args, **kwargs):
        pass
class DeleteVolumes(task.Task):
    """Taskflow task: delete every volume attached to the server, waiting
    for each volume to become deletable first."""
    def execute(self, server, config):
        nova = get_openstack_nova_client(config)
        cinder = get_openstack_cinder_client(config)
        for volume in nova.volumes.get_server_volumes(server.id):
            retries = 0
            # Wait (max ~150s) until the volume is detached ('available')
            # or has errored -- only then can it be deleted.
            while cinder.volumes.get(volume.id).status not in \
                    ('available', 'error'):
                logging.debug("...waiting for volume to be ready")
                time.sleep(5)
                retries += 1
                if retries > 30:
                    raise RuntimeError('Volume %s is stuck' % volume.id)
            try:
                cinder.volumes.delete(volume.id)
            except NotFound:
                # Already gone -- nothing to do.
                pass
    def revert(self, *args, **kwargs):
        pass
def get_provision_flow():
    """
    Provisioning flow consisting of three graph flows, each consisting of set of
    tasks that can execute in parallel.

    Returns tuple consisting of the whole flow and a dictionary including
    references to three graph flows for pre-execution customisations.
    """
    # Pre: resolve image/flavor, upload the key, create the root volume.
    pre_flow = gf.Flow('PreBootInstance').add(
        AddUserPublicKey('add_user_public_key'),
        GetImage('get_image', provides='image'),
        GetFlavor('get_flavor', provides='flavor'),
        CreateRootVolume('create_root_volume', provides='root_volume_id')
    )
    # Main: security group + data volume + server boot.
    main_flow = gf.Flow('BootInstance').add(
        CreateSecurityGroup('create_security_group', provides='security_group'),
        CreateDataVolume('create_data_volume', provides='data_volume_id'),
        ProvisionInstance('provision_instance', provides='server_id')
    )
    # Post: networking, volume attach, key cleanup.
    post_flow = gf.Flow('PostBootInstance').add(
        AllocateIPForInstance('allocate_ip_for_instance', provides='address_data'),
        AttachDataVolume('attach_data_volume'),
        RemoveUserPublicKey('remove_user_public_key')
    )
    # Linear flow runs pre -> main -> post in order.
    return (lf.Flow('ProvisionInstance').add(pre_flow, main_flow, post_flow),
            {'pre': pre_flow, 'main': main_flow, 'post': post_flow})
def get_deprovision_flow():
    """Deprovisioning flow: fetch the server, delete it, then remove its
    security group.

    Returns a (flow, {'pre'|'main'|'post': subflow}) tuple so callers can
    customise the graph flows before execution.
    """
    pre_flow = gf.Flow('PreDestroyInstance').add(
        GetServer('get_server', provides="server")
    )
    main_flow = gf.Flow('DestroyInstance').add(
        DeprovisionInstance('deprovision_instance')
    )
    post_flow = gf.Flow('PostDestroyInstance').add(
        DeleteSecurityGroup('delete_security_group')
    )
    return (lf.Flow('DeprovisionInstance').add(pre_flow, main_flow, post_flow),
            {'pre': pre_flow, 'main': main_flow, 'post': post_flow})
def get_upload_key_flow():
    """Single-task flow that uploads a user's public key to Nova."""
    return lf.Flow('UploadKey').add(
        AddUserPublicKey('upload_key')
    )
class OpenStackService(object):
    """Facade over the OpenStack provisioning/deprovisioning task flows
    plus a few direct Nova/Neutron convenience calls."""

    def __init__(self, config=None):
        self._config = config

    def provision_instance(self, display_name, image_name, flavor_name, public_key, extra_sec_groups=None,
                           master_sg_name=None, allocate_public_ip=True, root_volume_size=0,
                           data_volume_size=0, data_volume_type=None,
                           nics=None,
                           userdata=None):
        """Run the full provisioning flow.

        Returns the flow's result store on success, or a dict with an
        'error' key on failure (errors are logged, never raised).
        """
        try:
            flow, _ = get_provision_flow()
            return taskflow.engines.run(flow, engine='parallel', store=dict(
                image_name=image_name,
                flavor_name=flavor_name,
                display_name=display_name,
                master_sg_name=master_sg_name,
                public_key=public_key,
                extra_sec_groups=extra_sec_groups,
                allocate_public_ip=allocate_public_ip,
                root_volume_size=root_volume_size,
                data_volume_size=data_volume_size,
                data_volume_type=data_volume_type,
                nics=nics,
                userdata=userdata,
                config=self._config))
        except Exception as e:
            logging.error(e)
            return {'error': 'flow failed due to: %s' % e}

    def deprovision_instance(self, server_id, display_name=None, delete_attached_volumes=False):
        """Run the deprovisioning flow; optionally also delete volumes
        still attached to the server."""
        flow, subflows = get_deprovision_flow()
        if delete_attached_volumes:
            subflows['main'].add(DeleteVolumes())
        try:
            return taskflow.engines.run(flow, engine='parallel', store=dict(
                server_id=server_id,
                config=self._config))
        except Exception as e:
            logging.error(e)
            return {'error': 'flow failed due to: %s' % (e)}

    def get_instance_state(self, instance_id):
        """Return the Nova status string of the instance."""
        nc = get_openstack_nova_client(self._config)
        return nc.servers.get(instance_id).status

    def get_instance_networks(self, instance_id):
        """Return the Nova networks mapping of the instance."""
        nc = get_openstack_nova_client(self._config)
        return nc.servers.get(instance_id).networks

    def list_images(self):
        nc = get_openstack_nova_client(self._config)
        return nc.glance.list()

    def list_flavors(self):
        nc = get_openstack_nova_client(self._config)
        return nc.flavors.list()

    def upload_key(self, key_name, public_key):
        """Upload a public key via the UploadKey flow; errors are logged
        and reported as an 'error' dict."""
        try:
            return taskflow.engines.run(
                get_upload_key_flow(),
                engine='parallel',
                store=dict(
                    config=self._config,
                    display_name=key_name,
                    public_key=public_key
                )
            )
        except Exception as e:
            logging.error(e)
            return {'error': 'flow failed'}

    def delete_key(self, key_name):
        """Best-effort deletion of a Nova keypair by name."""
        logging.debug('Deleting key: %s' % key_name)
        nc = get_openstack_nova_client(self._config)
        try:
            key = nc.keypairs.find(name=key_name)
            key.delete()
        # BUG FIX: narrowed from a bare 'except:' (which also swallowed
        # KeyboardInterrupt/SystemExit).
        except Exception:
            logging.warning('Key not found: %s' % key_name)

    def clear_security_group_rules(self, group_id):
        """Delete all ingress rules of the given security group."""
        nc = get_openstack_neutron_client(self._config)
        sg = nc.show_security_group(group_id)
        sec_group_rules = sg['security_group']['security_group_rules']
        for rule in sec_group_rules:
            if rule['direction'] == 'ingress':
                nc.delete_security_group_rule(rule['id'])

    def create_security_group(self, security_group_name, security_group_description):
        """Create a security group with the given name and description."""
        nc = get_openstack_neutron_client(self._config)
        # BUG FIX: the security_group_description parameter was ignored
        # and a hard-coded string was passed instead.
        # NOTE(review): a neutron client may not expose
        # security_groups.create -- verify against the client returned by
        # get_openstack_neutron_client.
        nc.security_groups.create(
            security_group_name,
            security_group_description)

    def create_security_group_rule(self, security_group_id, from_port, to_port, cidr, ip_protocol='tcp',
                                   group_id=None):
        """Add a single ingress rule to a security group."""
        nc = get_openstack_neutron_client(self._config)
        nc.create_security_group_rule({"security_group_rule": dict(
            security_group_id=security_group_id,
            protocol=ip_protocol,
            ethertype='ipv4',
            port_range_min=from_port,
            direction='ingress',
            port_range_max=to_port,
            remote_ip_prefix=cidr,
            remote_group_id=group_id
        )})
|
import unittest
import greatest_common_divisor as gcd
class TestGreatestCommonDivisor(unittest.TestCase):
    """Tests for the GreatestCommonDivisor helper."""

    def setUp(self):
        # Immutable tuple of (a, b, expected gcd) cases -- the data never
        # changes, so a tuple of tuples is used instead of a list.
        # https://en.wikipedia.org/wiki/Algorithm
        self.test_data = (
            (12, 8, 4),
            (9, 12, 3),
            (54, 24, 6),
            (3009, 884, 17),
            (40902, 24140, 34),
            (14157, 5950, 1),
        )

    def test_greatest_common_divisor_zero(self):
        # Zero or negative operands are reported as 0.
        for a, b in ((12, 0), (0, 13), (-5, 13)):
            self.assertEqual(
                0, gcd.GreatestCommonDivisor.greatest_common_divisor(a, b))

    def test_greatest_common_divisor(self):
        for a, b, expected in self.test_data:
            actual = gcd.GreatestCommonDivisor.greatest_common_divisor(a, b)
            fail_message = str.format(
                "expected {0} but got {1}", expected, actual)
            self.assertEqual(expected, actual, fail_message)

    def test_next_smaller_divisor(self):
        cases = (
            (8, 8, 4),
            (12, 12, 6),
            (12, 6, 4),
            (12, 4, 3),
            (12, 3, 2),
            (12, 2, 1),
            (12, 1, 1),
            (54, 18, 9),
        )
        for number, divisor, expected in cases:
            self.assertEqual(
                expected,
                gcd.GreatestCommonDivisor.next_smaller_divisor(number, divisor))
if __name__ == "__main__":
unittest.main()
|
"""
Модуль с преднастроенными панелями-деевьями
"""
from __future__ import absolute_import
from m3.actions.urls import get_url
from m3_ext.ui import containers
from m3_ext.ui import controls
from m3_ext.ui import menus
from m3_ext.ui import render_component
from m3_ext.ui.fields import ExtSearchField
class ExtObjectTree(containers.ExtTree):
    """
    Tree panel for managing a list of objects.
    """
    #==========================================================================
    # Inner helper classes for ExtObjectTree
    #==========================================================================
    class TreeContextMenu(menus.ExtContextMenu):
        """
        Inner class providing convenient access to the tree's context menu.
        """
        def __init__(self, *args, **kwargs):
            super(
                ExtObjectTree.TreeContextMenu, self
            ).__init__(
                *args, **kwargs
            )
            self.menuitem_new = menus.ExtContextMenuItem(
                text=u'Новый в корне',
                icon_cls='add_item',
                handler='contextMenuNewRoot'
            )
            self.menuitem_new_child = menus.ExtContextMenuItem(
                text=u'Новый дочерний',
                icon_cls='add_item',
                handler='contextMenuNewChild'
            )
            self.menuitem_edit = menus.ExtContextMenuItem(
                text=u'Изменить',
                icon_cls='edit_item',
                handler='contextMenuEdit'
            )
            self.menuitem_delete = menus.ExtContextMenuItem(
                text=u'Удалить',
                icon_cls='delete_item',
                handler='contextMenuDelete'
            )
            self.menuitem_separator = menus.ExtContextMenuSeparator()
            self.init_component()
    class TreeTopBar(containers.ExtToolBar):
        """
        Inner class providing convenient access to the grid's top toolbar.
        """
        def __init__(self, *args, **kwargs):
            super(ExtObjectTree.TreeTopBar, self).__init__(*args, **kwargs)
            self.button_new = menus.ExtContextMenuItem(
                text=u'Новый в корне',
                icon_cls='add_item',
                handler='topBarNewRoot'
            )
            self.button_new_child = menus.ExtContextMenuItem(
                text=u'Новый дочерний',
                icon_cls='add_item',
                handler='topBarNewChild'
            )
            self.button_edit = controls.ExtButton(
                text=u'Изменить',
                icon_cls='edit_item',
                handler='topBarEdit'
            )
            self.button_delete = controls.ExtButton(
                text=u'Удалить',
                icon_cls='delete_item',
                handler='topBarDelete'
            )
            self.button_refresh = controls.ExtButton(
                text=u'Обновить',
                icon_cls='refresh-icon-16',
                handler='topBarRefresh'
            )
            # "New in root" and "new child" live in a drop-down menu
            # under a single "Add" toolbar button.
            menu = menus.ExtContextMenu()
            menu.items.append(self.button_new)
            menu.items.append(self.button_new_child)
            self.add_menu = containers.ExtToolbarMenu(
                icon_cls="add_item",
                menu=menu,
                text=u'Добавить'
            )
            self.init_component()
    #==========================================================================
    # The ExtObjectTree class definition itself
    #==========================================================================
    def __init__(self, *args, **kwargs):
        super(ExtObjectTree, self).__init__(*args, **kwargs)
        self.template = 'ext-trees/ext-object-tree.js'
        #======================================================================
        # Actions executed from within the grid
        #======================================================================
        self.action_new = None
        self.action_edit = None
        self.action_delete = None
        self.action_data = None
        #======================================================================
        # Data source for the grid
        #======================================================================
        self.load_mask = True
        self.row_id_name = 'id'
        self.parent_id_name = 'parent_id'
        self.allow_paging = False
        #======================================================================
        # Context menus and bars of the tree
        #======================================================================
        self.context_menu_row = ExtObjectTree.TreeContextMenu()
        self.context_menu_tree = ExtObjectTree.TreeContextMenu()
        self.top_bar = ExtObjectTree.TreeTopBar()
        self.top_bar.items.append(self.top_bar.add_menu)
        self.top_bar.items.append(self.top_bar.button_edit)
        self.top_bar.items.append(self.top_bar.button_delete)
        self.top_bar.items.append(self.top_bar.button_refresh)
        self.dblclick_handler = 'onEditRecord'
        # "Folder sort" flag:
        # if True, folders always appear above plain items,
        # otherwise they are sorted like regular items
        self.folder_sort = True
        # Whether sorting is enabled in the tree
        self.enable_tree_sort = True
        # After editing or adding, only the tree node where the change
        # occurred is refreshed
        self.incremental_update = False
        # Exclusion list for make_read_only
        self._mro_exclude_list = []
        self.init_component()
    def add_search_field(self):
        u"""Add a search field to the grid's top bar."""
        self.top_bar.search_field = ExtSearchField(
            empty_text=u'Поиск', width=200, component_for_search=self)
        self.top_bar.add_fill()
        self.top_bar.items.append(self.top_bar.search_field)
        self._mro_exclude_list.append(self.top_bar.search_field)
    def render(self):
        """
        Overridden tree render: adjusts the contents of the panels and
        context menus (based on which actions are configured) before
        rendering the component.
        """
        if self.action_new:
            self.context_menu_row.items.append(
                self.context_menu_row.menuitem_new)
            self.context_menu_row.items.append(
                self.context_menu_row.menuitem_new_child)
            self.context_menu_tree.items.append(
                self.context_menu_tree.menuitem_new)
        if self.action_edit:
            self.context_menu_row.items.append(
                self.context_menu_row.menuitem_edit)
            self.handler_dblclick = self.dblclick_handler
        if self.action_delete:
            self.context_menu_row.items.append(
                self.context_menu_row.menuitem_delete)
        # A context menu is attached to the grid only if it contains
        # at least one item
        if self.context_menu_tree.items:
            self.handler_contextmenu = self.context_menu_tree
        if self.context_menu_row.items:
            self.handler_rowcontextmenu = self.context_menu_row
        #======================================================================
        # Top bar setup
        #======================================================================
        # Remove toolbar buttons whose action is not configured.
        for action, btn in (
            (self.action_new, self.top_bar.add_menu),
            (self.action_edit, self.top_bar.button_edit),
            (self.action_delete, self.top_bar.button_delete),
            (self.action_data, self.top_bar.button_refresh),
        ):
            if not action and btn in self.top_bar.items:
                self.top_bar.items.remove(btn)
        # fine-tuning of self.store
        if not self.url and self.action_data:
            self.url = get_url(self.action_data)
        self.render_base_config()
        self.render_params()
        return render_component(self)
    def render_params(self):
        """Publish action URLs, context and tree options as template params."""
        super(ExtObjectTree, self).render_params()
        get_url_or_none = lambda x: get_url(x) if x else None
        new_url = get_url_or_none(self.action_new)
        edit_url = get_url_or_none(self.action_edit)
        delete_url = get_url_or_none(self.action_delete)
        data_url = get_url_or_none(self.action_data)
        context_json = (
            self.action_context.json if self.action_context else None
        )
        self._put_params_value(
            'actions', {
                'newUrl': new_url,
                'editUrl': edit_url,
                'deleteUrl': delete_url,
                'dataUrl': data_url,
                'contextJson': context_json
            }
        )
        self._put_params_value('rowIdName', self.row_id_name)
        self._put_params_value('parentIdName', self.parent_id_name)
        self._put_params_value('folderSort', self.folder_sort)
        self._put_params_value('enableTreeSort', self.enable_tree_sort)
        self._put_params_value('incrementalUpdate', self.incremental_update)
    def t_render_base_config(self):
        # Template hook: serialized base config.
        return self._get_config_str()
    def t_render_params(self):
        # Template hook: serialized params.
        return self._get_params_str()
|
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from typing import List, Optional
__version__ = "20.0.dev0"
def main(args=None):
    # type: (Optional[List[str]]) -> int
    """This is an internal API only meant for use by pip's own console scripts.

    For additional details, see https://github.com/pypa/pip/issues/7498.
    """
    # Imported lazily so that importing the top-level package stays cheap.
    from pip._internal.utils.entrypoints import _wrapper
    return _wrapper(args)
|
from keras.models import Sequential, model_from_json
from keras.layers import Dense, Dropout, Activation, Flatten, Convolution2D, MaxPooling2D, Lambda, ELU
from keras.layers.normalization import BatchNormalization
from keras.optimizers import Adam
import cv2
import csv
import numpy as np
import os
from random import random
from sklearn.model_selection import train_test_split
DATA_PATH = './data/t1/'
def trans_image(image, steer, trans_range):
    """Randomly translate *image* and adjust the steering angle to match.

    Ref: https://chatbotslife.com/using-augmentation-to-mimic-human-driving-496b569760a9#.s1pwczi3q

    Returns (translated_image, adjusted_steering_angle).
    """
    height, width, _ = image.shape
    # Horizontal shift, uniform in [-trans_range/2, trans_range/2).
    shift_x = trans_range * np.random.uniform() - trans_range / 2
    # Each pixel of horizontal shift contributes a proportional
    # steering correction (scaled by 2 * 0.2 over the full range).
    adjusted_steer = steer + shift_x / trans_range * 2 * .2
    # Vertical shift, uniform in [-20, 20); no steering effect.
    shift_y = 40 * np.random.uniform() - 40 / 2
    translation = np.float32([[1, 0, shift_x], [0, 1, shift_y]])
    shifted = cv2.warpAffine(image, translation, (width, height))
    return shifted, adjusted_steer
def gen_data(X, y, batch_size=128, validation=False):
    """Infinite generator of (features, labels) batches for fit_generator.

    X holds rows of [center, left, right] image paths; y holds the
    steering angles. Training batches also include the left/right camera
    images with a steering offset; validation batches use only the
    (jittered) center image.
    """
    gen_start = 0
    while True:
        features = []
        labels = []
        # Wrap around once the whole data set has been consumed.
        if gen_start >= len(y):
            gen_start = 0
        ending = min(gen_start+batch_size, len(y))
        for idx, row in enumerate(y[gen_start:ending]):
            center_img = cv2.imread(DATA_PATH + X[gen_start+idx][0].strip())
            center_img = cv2.cvtColor(center_img, cv2.COLOR_BGR2HSV)
            center_label = float(row[0])
            # Augmentation 1: jitter image (random translation + steering fix)
            center_img, center_label = trans_image(center_img, center_label, 100)
            # Augmentation 2: occasional horizontal flip.
            # NOTE(review): flips only when |angle| > 0.1, i.e. *non*-straight
            # frames, although the original comment said "flip straight" --
            # confirm intended behaviour.
            if random() > 0.5 and abs(center_label) > 0.1:
                center_img = cv2.flip(center_img, 1)
                labels.append(-center_label)
            else:
                labels.append(center_label)
            # Augmentation 3: random brightness on the V channel (HSV).
            random_bright = .25 + np.random.uniform()
            center_img[:,:,2] = center_img[:,:,2]*random_bright
            features.append(center_img)
            if not validation:
                # Augmentation 4: left image with a positive steering offset.
                # NOTE(review): offset is 0.15 (original comment said +0.25),
                # and side images are used as-is in BGR with no HSV
                # conversion or jitter -- confirm intended.
                left_img = cv2.imread(DATA_PATH + X[gen_start+idx][1].strip())
                features.append(left_img)
                labels.append(float(row[0]) + 0.15)
                # Augmentation 5: right image with a negative steering offset.
                right_img = cv2.imread(DATA_PATH + X[gen_start+idx][2].strip())
                features.append(right_img)
                labels.append(float(row[0]) - 0.15)
        gen_start += batch_size
        features = np.array(features)
        labels = np.array(labels)
        yield features, labels
def nvidia_model(row=66, col=200, ch=3, dropout=0.3, lr=0.0001):
    """Build the NVIDIA end-to-end steering CNN.

    Ref: https://arxiv.org/abs/1604.07316
    Five ELU conv layers followed by three ELU dense layers, dropout
    after every layer, compiled with Adam/MSE. Uses the Keras 1.x API
    (border_mode/subsample). Returns the compiled model.
    """
    input_shape = (row, col, ch)
    model = Sequential()
    # Normalise inputs in-network instead of manual preprocessing.
    model.add(BatchNormalization(axis=1, input_shape=input_shape))
    model.add(Convolution2D(24, 5, 5, border_mode='valid',
                            subsample=(2, 2), activation='elu'))
    model.add(Dropout(dropout))
    model.add(Convolution2D(36, 5, 5, border_mode='valid',
                            subsample=(2, 2), activation='elu'))
    model.add(Dropout(dropout))
    model.add(Convolution2D(48, 5, 5, border_mode='valid',
                            subsample=(2, 2), activation='elu'))
    model.add(Dropout(dropout))
    model.add(Convolution2D(64, 3, 3, border_mode='valid',
                            subsample=(1, 1), activation='elu'))
    model.add(Dropout(dropout))
    model.add(Convolution2D(64, 3, 3, border_mode='valid',
                            subsample=(1, 1), activation='elu'))
    model.add(Dropout(dropout))
    model.add(Flatten())
    model.add(Dense(100))
    model.add(Activation('elu'))
    model.add(Dropout(dropout))
    model.add(Dense(50))
    model.add(Activation('elu'))
    model.add(Dropout(dropout))
    model.add(Dense(10))
    model.add(Activation('elu'))
    model.add(Dropout(dropout))
    # Single regression output: the steering angle.
    model.add(Dense(1))
    model.add(Activation('elu'))
    model.compile(optimizer=Adam(lr=lr), loss='mse', metrics=['accuracy'])
    print(model.summary())
    return model
def nvidialite_model(row=33, col=100, ch=3, dropout=0.3, lr=0.0001):
    """Slimmed-down variant of the NVIDIA CNN (half-size input, three
    conv layers).

    Marked "(Dysfunctional)" by the original author -- kept for
    reference; not used by main(). Returns the compiled model.
    """
    input_shape = (row, col, ch)
    model = Sequential()
    model.add(BatchNormalization(axis=1, input_shape=input_shape))
    model.add(Convolution2D(24, 5, 5, border_mode='valid',
                            subsample=(2, 2), activation='elu'))
    model.add(Convolution2D(36, 5, 5, border_mode='valid',
                            subsample=(2, 2), activation='elu'))
    model.add(Convolution2D(48, 3, 3, border_mode='valid',
                            subsample=(1, 1), activation='elu'))
    model.add(Flatten())
    model.add(Dense(100))
    model.add(Activation('elu'))
    model.add(Dropout(dropout))
    model.add(Dense(50))
    model.add(Activation('elu'))
    model.add(Dropout(dropout))
    model.add(Dense(10))
    model.add(Activation('elu'))
    model.add(Dropout(dropout))
    model.add(Dense(1))
    model.add(Activation('elu'))
    model.compile(optimizer=Adam(lr=lr), loss='mse', metrics=['accuracy'])
    print(model.summary())
    return model
def load_data(filter=True):
    """Load driving_log.csv and return a train/validation split.

    CSV columns: center,left,right,steering,throttle,brake,speed
    Returns X_train, X_val, y_train, y_val (80/20 split, fixed seed).

    NOTE(review): the `filter` parameter is currently unused -- no
    filtering actually happens; kept for interface compatibility.
    """
    with open(DATA_PATH + 'driving_log.csv', 'r') as f:
        reader = csv.reader(f)
        data = [row for row in reader]
    data = np.array(data)
    # BUG FIX: `total` was initialised to 0 and never updated, so the
    # log line below always reported 0 samples.
    total = len(data)
    X = data[:,[0,1,2]]   # image path columns
    y = data[:,[3]]       # steering angle column
    print('Total samples:', total)
    print('Total samples (after filter):', len(X))
    return train_test_split(X, y, test_size=0.2, random_state=42)
def load_model(lr=0.001):
    """Load the persisted model (model.json architecture + model.h5
    weights) and recompile it with Adam/MSE."""
    with open('model.json', 'r') as jfile:
        model = model_from_json(jfile.read())
    model.compile(optimizer=Adam(lr=lr), loss='mse', metrics=['accuracy'])
    model.load_weights('model.h5')
    return model
def main():
    """Train the steering model and persist architecture + weights."""
    # Load data
    X_train, X_val, y_train, y_val = load_data()
    print('X_train shape:', X_train.shape)
    print('X_val shape:', X_val.shape)
    # Build model: resume from model.json/model.h5 when present,
    # otherwise start a fresh NVIDIA model.
    if 'model.json' in os.listdir():
        model = load_model()
    else:
        model = nvidia_model()
    # samples_per_epoch is 3x the training rows because each training
    # batch adds the left and right camera images as well.
    model.fit_generator(gen_data(X_train, y_train),
                        samples_per_epoch=len(X_train)*3, nb_epoch=8,
                        validation_data=gen_data(X_val, y_val, validation=True),
                        nb_val_samples=len(X_val))
    # Save model architecture (JSON) and weights (HDF5) separately.
    json = model.to_json()
    model.save_weights('model.h5')
    with open('model.json', 'w') as f:
        f.write(json)
if __name__ == "__main__": main()
|
from __future__ import unicode_literals
import os, sys, json
import webnotes
import webnotes.db
import getpass
from webnotes.model.db_schema import DbManager
from webnotes.model.sync import sync_for
from webnotes.utils import cstr
class Installer:
    """Bootstraps a webnotes site database.

    Creates the MySQL user and database, restores the base SQL dump,
    seeds the core documents and finalises the Administrator account.
    (Legacy Python 2 code: print statements, ``except X, e`` syntax.)
    """
    def __init__(self, root_login, root_password=None, db_name=None, site=None, site_config=None):
        # Make sure conf.py / site_config.json exist before connecting.
        make_conf(db_name, site=site, site_config=site_config)
        self.site = site
        if isinstance(root_password, list):
            # e.g. an argument parser may hand the password over as a list
            root_password = root_password[0]
        self.make_connection(root_login, root_password)
        webnotes.local.conn = self.conn
        webnotes.local.session = webnotes._dict({'user':'Administrator'})
        self.dbman = DbManager(self.conn)
    def make_connection(self, root_login, root_password):
        """Connect to MySQL as root; fall back to conf or an interactive
        prompt for the password."""
        if root_login:
            if not root_password:
                root_password = webnotes.conf.get("root_password") or None
            if not root_password:
                root_password = getpass.getpass("MySQL root password: ")
        self.root_password = root_password
        self.conn = webnotes.db.Database(user=root_login, password=root_password)
    def install(self, db_name, source_sql=None, admin_password = 'admin', verbose=0,
            force=0):
        """Create db_name (refusing to overwrite unless force), restore the
        SQL dump and run app installation. Returns db_name."""
        if force or (db_name not in self.dbman.get_database_list()):
            # delete user (if exists)
            self.dbman.delete_user(db_name)
        else:
            raise Exception("Database %s already exists" % (db_name,))
        # create user and db
        self.dbman.create_user(db_name, webnotes.conf.db_password)
        if verbose: print "Created user %s" % db_name
        # create a database
        self.dbman.create_database(db_name)
        if verbose: print "Created database %s" % db_name
        # grant privileges to user
        self.dbman.grant_all_privileges(db_name, db_name)
        if verbose: print "Granted privileges to user %s and database %s" % (db_name, db_name)
        # flush user privileges
        self.dbman.flush_privileges()
        # close root connection
        self.conn.close()
        # reconnect as the site user for the import
        webnotes.connect(db_name=db_name, site=self.site)
        self.dbman = DbManager(webnotes.conn)
        # import in db_name
        if verbose: print "Starting database import..."
        # get the path of the sql file to import
        if not source_sql:
            source_sql = os.path.join(os.path.dirname(webnotes.__file__), "..",
                'conf', 'Framework.sql')
        self.dbman.restore_database(db_name, source_sql, db_name, webnotes.conf.db_password)
        if verbose: print "Imported from database %s" % source_sql
        self.create_auth_table()
        # fresh app: only when restoring the pristine framework dump
        if 'Framework.sql' in source_sql:
            if verbose: print "Installing app..."
            self.install_app(verbose=verbose)
        # update admin password
        self.update_admin_password(admin_password)
        # create public folder
        from webnotes.install_lib import setup_public_folder
        setup_public_folder.make(site=self.site)
        if not self.site:
            # single-site install: also build the client asset bundle
            from webnotes.build import bundle
            bundle(False)
        return db_name
    def install_app(self, verbose=False):
        """Sync doctypes, import core docs and fixtures, run post-install
        hooks and mark all patches as applied."""
        sync_for("lib", force=True, sync_everything=True, verbose=verbose)
        self.import_core_docs()
        try:
            from startup import install
        except ImportError, e:
            # the app ships no custom install hooks
            install = None
        if os.path.exists("app"):
            sync_for("app", force=True, sync_everything=True, verbose=verbose)
            if os.path.exists(os.path.join("app", "startup", "install_fixtures")):
                install_fixtures()
        # build website sitemap
        from website.doctype.website_sitemap_config.website_sitemap_config import build_website_sitemap_config
        build_website_sitemap_config()
        if verbose: print "Completing App Import..."
        install and install.post_import()
        if verbose: print "Updating patches..."
        self.set_all_patches_as_completed()
        self.assign_all_role_to_administrator()
    def update_admin_password(self, password):
        """Set the Administrator password (conf admin_password wins if set)."""
        from webnotes.auth import _update_password
        webnotes.conn.begin()
        _update_password("Administrator", webnotes.conf.get("admin_password") or password)
        webnotes.conn.commit()
    def import_core_docs(self):
        """Insert the minimal Profile/UserRole/Role records needed to boot."""
        install_docs = [
            # profiles
            {'doctype':'Profile', 'name':'Administrator', 'first_name':'Administrator',
                'email':'admin@localhost', 'enabled':1},
            {'doctype':'Profile', 'name':'Guest', 'first_name':'Guest',
                'email':'guest@localhost', 'enabled':1},
            # userroles
            {'doctype':'UserRole', 'parent': 'Administrator', 'role': 'Administrator',
                'parenttype':'Profile', 'parentfield':'user_roles'},
            {'doctype':'UserRole', 'parent': 'Guest', 'role': 'Guest',
                'parenttype':'Profile', 'parentfield':'user_roles'},
            {'doctype': "Role", "role_name": "Report Manager"}
        ]
        webnotes.conn.begin()
        for d in install_docs:
            bean = webnotes.bean(d)
            bean.insert()
        webnotes.conn.commit()
    def set_all_patches_as_completed(self):
        """Mark every known patch as applied so a fresh install skips them."""
        try:
            from patches.patch_list import patch_list
        except ImportError, e:
            print "No patches to update."
            return
        for patch in patch_list:
            webnotes.doc({
                "doctype": "Patch Log",
                "patch": patch
            }).insert()
        webnotes.conn.commit()
    def assign_all_role_to_administrator(self):
        """Grant the Administrator profile every role in the database."""
        webnotes.bean("Profile", "Administrator").get_controller().add_roles(*webnotes.conn.sql_list("""
            select name from tabRole"""))
        webnotes.conn.commit()
    def create_auth_table(self):
        """Create the __Auth table that stores user password hashes."""
        webnotes.conn.sql_ddl("""create table if not exists __Auth (
                `user` VARCHAR(180) NOT NULL PRIMARY KEY,
                `password` VARCHAR(180) NOT NULL
                ) ENGINE=InnoDB DEFAULT CHARSET=utf8""")
def make_conf(db_name=None, db_password=None, site=None, site_config=None):
    """Ensure conf.py exists and, for multi-site setups, write the
    per-site site_config.json; then re-initialise webnotes.

    The outer try uses ImportError as control flow: if ``import conf``
    fails (or only a stale .pyc exists), conf.py is generated from the
    template in lib/conf.
    """
    try:
        from werkzeug.exceptions import NotFound
        import conf
        try:
            webnotes.init(site=site)
        except NotFound:
            # the site folder may legitimately not exist yet
            pass
        if not site and webnotes.conf.site:
            site = webnotes.conf.site
        if site:
            # conf exists and site is specified, create site_config.json
            make_site_config(site, db_name, db_password, site_config)
        elif os.path.exists("conf.py"):
            print "conf.py exists"
        else:
            # pyc file exists but py doesn't
            raise ImportError
    except ImportError:
        if site:
            raise Exception("conf.py does not exist")
        else:
            # create conf.py from the template, filling in db credentials
            with open(os.path.join("lib", "conf", "conf.py"), "r") as confsrc:
                with open("conf.py", "w") as conftar:
                    conftar.write(confsrc.read() % get_conf_params(db_name, db_password))
    # restart webnotes with the (possibly just-written) configuration
    webnotes.destroy()
    webnotes.init(site=site)
def make_site_config(site, db_name=None, db_password=None, site_config=None):
    """Write sites_dir/<site>/site_config.json unless it already exists.

    When no usable site_config dict is supplied, the db credentials are
    collected (or generated) via get_conf_params.
    """
    import conf
    if not getattr(conf, "sites_dir", None):
        raise Exception("sites_dir missing in conf.py")

    site_path = os.path.join(conf.sites_dir, site)
    if not os.path.exists(site_path):
        os.mkdir(site_path)

    site_file = os.path.join(site_path, "site_config.json")
    if os.path.exists(site_file):
        # never clobber an existing site config
        return

    if not (site_config and isinstance(site_config, dict)):
        site_config = get_conf_params(db_name, db_password)

    with open(site_file, "w") as outfile:
        outfile.write(json.dumps(site_config, indent=1, sort_keys=True))
def get_conf_params(db_name=None, db_password=None):
    """Collect database connection parameters.

    Prompts for a database name when none is supplied (and refuses an
    empty answer); generates a random 16-character password when none is
    supplied. Returns a dict with keys "db_name" and "db_password".
    """
    db_name = db_name or raw_input("Database Name: ")
    if not db_name:
        raise Exception("Database Name Required")

    if not db_password:
        from webnotes.utils import random_string
        db_password = random_string(16)

    return {"db_name": db_name, "db_password": db_password}
def install_fixtures():
    """Import JSON/CSV fixtures from app/startup/install_fixtures.

    JSON files are inserted (or updated) as beans; CSV files go through the
    data import tool. Any files under install_fixtures/files are copied
    into public/files.
    """
    print "Importing install fixtures..."
    for basepath, folders, files in os.walk(os.path.join("app", "startup", "install_fixtures")):
        for f in files:
            f = cstr(f)
            if f.endswith(".json"):
                print "Importing " + f
                with open(os.path.join(basepath, f), "r") as infile:
                    webnotes.bean(json.loads(infile.read())).insert_or_update()
                    webnotes.conn.commit()
            if f.endswith(".csv"):
                from core.page.data_import_tool.data_import_tool import import_file_by_path
                import_file_by_path(os.path.join(basepath, f), ignore_links = True, overwrite=True)
                webnotes.conn.commit()

    # copy bundled static files into the public files folder
    if os.path.exists(os.path.join("app", "startup", "install_fixtures", "files")):
        if not os.path.exists(os.path.join("public", "files")):
            os.makedirs(os.path.join("public", "files"))
        os.system("cp -r %s %s" % (os.path.join("app", "startup", "install_fixtures", "files", "*"),
            os.path.join("public", "files")))
|
import random
class ai:
    """Tiny frequency-based responder.

    Learns from two parallel lists: self.OUT[i] is the response that was
    observed following action self.IN[i]. get_act() returns the n-th most
    frequent response recorded for a given action.
    """

    def __init__(self, actions, responses):
        # Parallel observation lists: responses[i] followed actions[i].
        self.IN = actions
        self.OUT = responses

    def get_act(self, action, valres):
        """Return the valres-th most common response seen for *action*.

        Falls back to a random known response when the action is unknown
        or valres is out of range.
        """
        if action not in self.IN:
            return random.choice(self.OUT)

        # Count responses actually paired with this action. Iterating the
        # pairs with zip fixes the old OUT.index(response) lookup, which
        # always hit the first duplicate and could credit the wrong action.
        counts = {}
        for act, response in zip(self.IN, self.OUT):
            if act == action:
                counts[response] = counts.get(response, 0) + 1
        print(counts)

        # Order responses by descending frequency (each key listed once).
        keys = []
        vals = []
        for v in sorted(counts.values(), reverse=True):
            for k in counts.keys():
                if counts[k] == v and k not in keys:
                    keys.append(k)
                    vals.append(v)
        print(keys)
        print(vals)

        try:
            return keys[valres]
        except IndexError:
            # requested rank does not exist; pick any known response
            return random.choice(self.OUT)

    def update(self, ins, outs):
        """Replace the observation lists (original was missing self)."""
        self.IN = ins
        self.OUT = outs
def test():
    """Smoke test: train a responder and print its top answer for 'attack'."""
    bot = ai(['attack', 'retreat', 'eat', 'attack', 'attack'], ['run', 'cheer', 'share lunch', 'fall', 'run'])
    print(bot.get_act('attack', 0))
|
"""OPP Hardware interface.
Contains the hardware interface and drivers for the Open Pinball Project
platform hardware, including the solenoid, input, incandescent, and neopixel
boards.
"""
import asyncio
from collections import defaultdict
from typing import Dict, List, Set, Union, Tuple, Optional # pylint: disable-msg=cyclic-import,unused-import
from mpf.core.platform_batch_light_system import PlatformBatchLightSystem
from mpf.core.utility_functions import Util
from mpf.platforms.base_serial_communicator import HEX_FORMAT
from mpf.platforms.interfaces.driver_platform_interface import PulseSettings, HoldSettings
from mpf.platforms.opp.opp_coil import OPPSolenoidCard
from mpf.platforms.opp.opp_incand import OPPIncandCard
from mpf.platforms.opp.opp_modern_lights import OPPModernLightChannel, OPPNeopixelCard, OPPModernMatrixLightsCard
from mpf.platforms.opp.opp_serial_communicator import OPPSerialCommunicator, BAD_FW_VERSION
from mpf.platforms.opp.opp_switch import OPPInputCard
from mpf.platforms.opp.opp_switch import OPPMatrixCard
from mpf.platforms.opp.opp_rs232_intf import OppRs232Intf
from mpf.core.platform import SwitchPlatform, DriverPlatform, LightsPlatform, SwitchSettings, DriverSettings, \
DriverConfig, SwitchConfig, RepulseSettings
MYPY = False
if MYPY: # pragma: no cover
from mpf.platforms.opp.opp_coil import OPPSolenoid # pylint: disable-msg=cyclic-import,unused-import
from mpf.platforms.opp.opp_incand import OPPIncand # pylint: disable-msg=cyclic-import,unused-import
from mpf.platforms.opp.opp_switch import OPPSwitch # pylint: disable-msg=cyclic-import,unused-import
class OppHardwarePlatform(LightsPlatform, SwitchPlatform, DriverPlatform):
"""Platform class for the OPP hardware.
Args:
----
machine: The main ``MachineController`` instance.
"""
__slots__ = ["opp_connection", "serial_connections", "opp_incands", "opp_solenoid", "sol_dict",
"opp_inputs", "inp_dict", "inp_addr_dict", "matrix_inp_addr_dict", "read_input_msg",
"neo_card_dict", "num_gen2_brd", "gen2_addr_arr", "bad_crc", "min_version", "_poll_task",
"config", "_poll_response_received", "machine_type", "opp_commands", "_incand_task", "_light_system",
"matrix_light_cards"]
def __init__(self, machine) -> None:
    """Initialise OPP platform.

    Builds the empty per-chain registries, validates the "opp" config
    section, and rejects anything but gen2 driverboards. The response
    dispatch table (opp_commands) starts with the discovery handlers;
    initialize() later swaps in the steady-state input handlers.
    """
    super().__init__(machine)
    # connections and card registries, keyed by chain serial / card index
    self.opp_connection = {}    # type: Dict[str, OPPSerialCommunicator]
    self.serial_connections = set()     # type: Set[OPPSerialCommunicator]
    self.opp_incands = dict()   # type: Dict[str, OPPIncandCard]
    self.opp_solenoid = []      # type: List[OPPSolenoidCard]
    self.sol_dict = dict()      # type: Dict[str, OPPSolenoid]
    self.opp_inputs = []        # type: List[Union[OPPInputCard, OPPMatrixCard]]
    self.inp_dict = dict()      # type: Dict[str, OPPSwitch]
    self.inp_addr_dict = dict()     # type: Dict[str, OPPInputCard]
    self.matrix_inp_addr_dict = dict()      # type: Dict[str, OPPMatrixCard]
    # pre-built "read all inputs" poll message per chain (built during discovery)
    self.read_input_msg = {}    # type: Dict[str, bytes]
    self.neo_card_dict = dict()     # type: Dict[str, OPPNeopixelCard]
    self.matrix_light_cards = dict()    # type: Dict[str, OPPModernMatrixLightsCard]
    self.num_gen2_brd = 0
    # per chain: board address -> firmware version (None until read)
    self.gen2_addr_arr = {}     # type: Dict[str, Dict[int, Optional[int]]]
    self.bad_crc = defaultdict(lambda: 0)
    # lowest firmware version seen on each chain; starts at sentinel max
    self.min_version = defaultdict(lambda: 0xffffffff)      # type: Dict[str, int]
    self._poll_task = {}    # type: Dict[str, asyncio.Task]
    self._incand_task = None    # type: Optional[asyncio.Task]
    self._light_system = None   # type: Optional[PlatformBatchLightSystem]
    self.features['tickless'] = True
    self.config = self.machine.config_validator.validate_config("opp", self.machine.config.get('opp', {}))
    self._configure_device_logging_and_debug("OPP", self.config)
    self._poll_response_received = {}   # type: Dict[str, asyncio.Event]
    assert self.log is not None

    # platform config overrides machine-wide driverboards setting
    if self.config['driverboards']:
        self.machine_type = self.config['driverboards']
    else:
        self.machine_type = self.machine.config['hardware']['driverboards'].lower()

    if self.machine_type == 'gen1':
        raise AssertionError("Original OPP boards not currently supported.")

    if self.machine_type == 'gen2':
        self.debug_log("Configuring the OPP Gen2 boards")
    else:
        self.raise_config_error('Invalid driverboards type: {}'.format(self.machine_type), 15)

    # Only including responses that should be received
    self.opp_commands = {
        ord(OppRs232Intf.INV_CMD): self.inv_resp,
        ord(OppRs232Intf.EOM_CMD): self.eom_resp,
        ord(OppRs232Intf.GET_GEN2_CFG): self.get_gen2_cfg_resp,
        ord(OppRs232Intf.READ_GEN2_INP_CMD): self.read_gen2_inp_resp_initial,
        ord(OppRs232Intf.GET_VERS_CMD): self.vers_resp,
        ord(OppRs232Intf.READ_MATRIX_INP): self.read_matrix_inp_resp_initial,
    }
async def initialize(self):
    """Initialise connections to OPP hardware.

    After discovery, the input-response handlers are swapped from the
    "initial state" versions to the steady-state ones, and the batch
    light system is created (flushing up to 128 channels per update).
    """
    await self._connect_to_hardware()
    self.opp_commands[ord(OppRs232Intf.READ_GEN2_INP_CMD)] = self.read_gen2_inp_resp
    self.opp_commands[ord(OppRs232Intf.READ_MATRIX_INP)] = self.read_matrix_inp_resp
    self._light_system = PlatformBatchLightSystem(self.machine.clock, self._send_multiple_light_update,
                                                  self.machine.config['mpf']['default_light_hw_update_hz'],
                                                  128)
async def _send_multiple_light_update(self, sequential_brightness_list: List[Tuple[OPPModernLightChannel,
                                                                                   float, int]]):
    """Send one serial-LED fade command covering a contiguous channel run.

    The frame is addressed to the first channel's card and carries the
    start pixel, the channel count, the common fade time and one
    brightness byte per channel, followed by a CRC8.
    """
    head_light, _, fade_ms = sequential_brightness_list[0]
    channel_count = len(sequential_brightness_list)

    frame = bytearray()
    frame.append(int(ord(OppRs232Intf.CARD_ID_GEN2_CARD) + head_light.addr))
    frame.append(OppRs232Intf.SERIAL_LED_CMD_FADE)
    # 16-bit big-endian fields: start pixel, channel count, fade time (ms)
    for word in (head_light.pixel_num, channel_count, fade_ms):
        high, low = divmod(int(word), 256)
        frame.append(high)
        frame.append(low)
    # one brightness byte per channel, scaled from 0..1 to 0..255
    for _, level, _ in sequential_brightness_list:
        frame.append(int(level * 255))
    frame.extend(OppRs232Intf.calc_crc8_whole_msg(frame))
    cmd = bytes(frame)

    if self.debug:
        self.debug_log("Set color on %s: %s", head_light.chain_serial, "".join(HEX_FORMAT % b for b in cmd))
    self.send_to_processor(head_light.chain_serial, cmd)
async def start(self):
    """Start polling and listening for commands.

    Spawns one poll task per discovered chain, starts each serial
    connection's read loop, and — only when some board runs firmware
    older than 2.1.0 — schedules the legacy incandescent updater.
    """
    # start polling every chain that produced a read-inputs message
    for chain_serial in self.read_input_msg:
        self._poll_task[chain_serial] = self.machine.clock.loop.create_task(self._poll_sender(chain_serial))
        self._poll_task[chain_serial].add_done_callback(Util.raise_exceptions)

    # start listening for commands
    for connection in self.serial_connections:
        await connection.start_read_loop()

    # any() instead of materializing a throwaway list just for truthiness
    if any(version < 0x02010000 for version in self.min_version.values()):
        # if we run any CPUs with firmware prior to 2.1.0 start incands updater
        self._incand_task = self.machine.clock.schedule_interval(self.update_incand,
                                                                 1 / self.config['incand_update_hz'])
    self._light_system.start()
def stop(self):
    """Stop hardware and close connections.

    Cancels the light system, all poll tasks and the optional incand
    updater, then stops every serial connection.
    """
    if self._light_system:
        self._light_system.stop()
    for task in self._poll_task.values():
        task.cancel()
    self._poll_task = {}

    if self._incand_task:
        self._incand_task.cancel()
        self._incand_task = None

    for connection in self.serial_connections:
        connection.stop()
    # reset to an empty set (not a list) to stay consistent with the
    # Set[OPPSerialCommunicator] declared in __init__
    self.serial_connections = set()
def __repr__(self):
    """Return a short, fixed identifier used in logs and debugging."""
    return '<Platform.OPP>'
def process_received_message(self, chain_serial, msg):
    """Send an incoming message from the OPP hardware to the proper method for servicing.

    Dispatches on the command byte via self.opp_commands; unknown
    commands are treated as loss of synchronization.

    Args:
    ----
        chain_serial: Serial of the chain which received the message.
        msg: Message to parse.
    """
    if len(msg) >= 1:
        # Verify valid Gen2 address: top three bits 001 -> 0x20..0x3f
        if (msg[0] & 0xe0) == 0x20:
            if len(msg) >= 2:
                cmd = msg[1]
            else:
                # address byte with no command byte
                cmd = OppRs232Intf.ILLEGAL_CMD
        # Look for EOM or INV commands
        elif msg[0] == ord(OppRs232Intf.INV_CMD) or msg[0] == ord(OppRs232Intf.EOM_CMD):
            cmd = msg[0]
        else:
            cmd = OppRs232Intf.ILLEGAL_CMD
    else:
        # No messages received, fake an EOM
        cmd = OppRs232Intf.EOM_CMD

    # Can't use try since it swallows too many errors for now
    if cmd in self.opp_commands:
        self.opp_commands[cmd](chain_serial, msg)
    else:
        self.log.warning("Received unknown serial command?%s. (This is "
                         "very worrisome.)", "".join(HEX_FORMAT % b for b in msg))

        # TODO: This means synchronization is lost. Send EOM characters
        # until they come back
        self.opp_connection[chain_serial].lost_synch()
@staticmethod
def _get_numbers(mask):
    """Return the list of set-bit positions in *mask* (bit 0 = LSB).

    Bug fix: the previous condition ``mask > ref`` stopped before testing
    the highest bit whenever the mask was an exact power of two (e.g.
    _get_numbers(0x8) returned [] instead of [3]). ``ref <= mask`` visits
    every bit up to and including the highest set bit; results for
    multi-bit masks are unchanged.
    """
    number = 0
    ref = 1
    result = []
    while ref <= mask:
        if mask & ref:
            result.append(number)
        number += 1
        ref = ref << 1
    return result
def get_info_string(self):
    """Dump infos about boards.

    Builds a human-readable summary of connected CPUs, per-board firmware,
    and all incand/input/solenoid/LED/matrix-light cards.
    """
    if not self.serial_connections:
        return "No connection to any CPU board."

    infos = "Connected CPUs:\n"
    # stable ordering by chain serial for reproducible output
    for connection in sorted(self.serial_connections, key=lambda x: x.chain_serial):
        infos += " - Port: {} at {} baud. Chain Serial: {}\n".format(connection.port, connection.baud,
                                                                     connection.chain_serial)
        for board_id, board_firmware in self.gen2_addr_arr[connection.chain_serial].items():
            if board_firmware is None:
                # discovery saw the board but the version read failed
                infos += "     -> Board: 0x{:02x} Firmware: broken\n".format(board_id)
            else:
                infos += "     -> Board: 0x{:02x} Firmware: 0x{:02x}\n".format(board_id, board_firmware)

    # section headers are only emitted when the section has entries
    infos += "\nIncand cards:\n" if self.opp_incands else ""
    card_format_string = " - Chain: {} Board: 0x{:02x} Card: {} Numbers: {}\n"
    for incand in self.opp_incands.values():
        infos += card_format_string.format(incand.chain_serial, incand.addr,
                                           incand.card_num,
                                           self._get_numbers(incand.mask))
    infos += "\nInput cards:\n"
    for inputs in self.opp_inputs:
        infos += card_format_string.format(inputs.chain_serial, inputs.addr,
                                           inputs.card_num,
                                           self._get_numbers(inputs.mask))
    infos += "\nSolenoid cards:\n"
    for outputs in self.opp_solenoid:
        infos += card_format_string.format(outputs.chain_serial, outputs.addr,
                                           outputs.card_num,
                                           self._get_numbers(outputs.mask))
    infos += "\nLEDs:\n" if self.neo_card_dict else ""
    for leds in self.neo_card_dict.values():
        infos += " - Chain: {} Board: 0x{:02x} Card: {}\n".format(leds.chain_serial, leds.addr, leds.card_num)
    infos += "\nMatrix lights:\n" if self.matrix_light_cards else ''
    for matrix_light in self.matrix_light_cards.values():
        infos += " - Chain: {} Board: 0x{:02x} Card: {} Numbers: 0 - 63\n".format(
            matrix_light.chain_serial, matrix_light.addr, matrix_light.card_num)
    return infos
async def _connect_to_hardware(self):
    """Connect to each port from the config.

    This process will cause the OPPSerialCommunicator to figure out which chains they've connected to
    and to register themselves. Afterwards every discovered board must have
    a readable firmware version, and all boards on a chain must share the
    chain's minimum version.
    """
    port_chain_serial_map = {v: k for k, v in self.config['chains'].items()}
    for port in self.config['ports']:
        # overwrite serial if defined for port
        overwrite_chain_serial = port_chain_serial_map.get(port, None)

        # single-port setups may omit the chain mapping; use the port name
        if overwrite_chain_serial is None and len(self.config['ports']) == 1:
            overwrite_chain_serial = port

        comm = OPPSerialCommunicator(platform=self, port=port, baud=self.config['baud'],
                                     overwrite_serial=overwrite_chain_serial)
        await comm.connect()
        self.serial_connections.add(comm)

    # validate firmware versions collected during discovery
    for chain_serial, versions in self.gen2_addr_arr.items():
        for chain_id, version in versions.items():
            if not version:
                self.raise_config_error("Could not read version for board {}-{}.".format(chain_serial, chain_id),
                                        16)
            if self.min_version[chain_serial] != version:
                # mixed firmware on one chain is not supported
                self.raise_config_error("Version mismatch. Board {}-{} has version {:d}.{:d}.{:d}.{:d} which is not"
                                        " the minimal version "
                                        "{:d}.{:d}.{:d}.{:d}".format(chain_serial, chain_id, (version >> 24) & 0xFF,
                                                                     (version >> 16) & 0xFF, (version >> 8) & 0xFF,
                                                                     version & 0xFF,
                                                                     (self.min_version[chain_serial] >> 24) & 0xFF,
                                                                     (self.min_version[chain_serial] >> 16) & 0xFF,
                                                                     (self.min_version[chain_serial] >> 8) & 0xFF,
                                                                     self.min_version[chain_serial] & 0xFF), 1)
def register_processor_connection(self, serial_number, communicator):
    """Register the processors to the platform.

    Stores the communicator in opp_connection, keyed by chain serial, so
    later sends can be routed by chain.

    Args:
    ----
        serial_number: Serial number of chain.
        communicator: Instance of OPPSerialCommunicator
    """
    self.opp_connection[serial_number] = communicator
def send_to_processor(self, chain_serial, msg):
    """Send message to processor with specific serial number.

    Args:
    ----
        chain_serial: Serial of the processor.
        msg: Message to send.
    """
    self.opp_connection[chain_serial].send(msg)
def update_incand(self):
    """Update all the incandescents connected to OPP hardware.

    This is done once per game loop if changes have been made.

    It is currently assumed that the UART oversampling will guarantee proper
    communication with the boards. If this does not end up being the case,
    this will be changed to update all the incandescents each loop.

    This is used for board with firmware < 2.1.0
    """
    for incand in self.opp_incands.values():
        # newer firmware handles incands through the light system instead
        if self.min_version[incand.chain_serial] >= 0x02010000:
            continue
        whole_msg = bytearray()
        # Check if any changes have been made (or state never sent yet)
        if incand.old_state is None or (incand.old_state ^ incand.new_state) != 0:
            # Update card
            incand.old_state = incand.new_state
            msg = bytearray()
            msg.append(incand.addr)
            msg.extend(OppRs232Intf.INCAND_CMD)
            msg.extend(OppRs232Intf.INCAND_SET_ON_OFF)
            # 32-bit on/off state, big-endian
            msg.append((incand.new_state >> 24) & 0xff)
            msg.append((incand.new_state >> 16) & 0xff)
            msg.append((incand.new_state >> 8) & 0xff)
            msg.append(incand.new_state & 0xff)
            msg.extend(OppRs232Intf.calc_crc8_whole_msg(msg))
            whole_msg.extend(msg)

            if whole_msg:
                # Note: No need to send EOM at end of cmds
                send_cmd = bytes(whole_msg)

                if self.debug:
                    self.debug_log("Update incand on %s cmd:%s", incand.chain_serial,
                                   "".join(HEX_FORMAT % b for b in send_cmd))
                self.send_to_processor(incand.chain_serial, send_cmd)
@classmethod
def get_coil_config_section(cls):
    """Return the machine-config section name holding OPP coil overrides."""
    return "opp_coils"
async def get_hw_switch_states(self):
    """Get initial hardware switch states.

    This changes switches from active low to active high: a cleared bit in
    old_state reports as 1 (active). Keys are "<chain>-<card>-<number>";
    matrix cards use numbers 32..95.
    """
    hw_states = dict()
    for opp_inp in self.opp_inputs:
        prefix = opp_inp.chain_serial + '-' + opp_inp.card_num + '-'
        if not opp_inp.is_matrix:
            # direct inputs: 32 possible bits, filtered by the card's mask
            for bit_pos in range(32):
                bit = 1 << bit_pos
                if bit & opp_inp.mask:
                    hw_states[prefix + str(bit_pos)] = 0 if bit & opp_inp.old_state else 1
        else:
            # matrix inputs: 64 bits, reported with a +32 number offset
            for bit_pos in range(64):
                hw_states[prefix + str(bit_pos + 32)] = 0 if (1 << bit_pos) & opp_inp.old_state else 1
    return hw_states
def inv_resp(self, chain_serial, msg):
    """Parse inventory response.

    Records every Gen2 board address found on the chain in
    gen2_addr_arr (firmware version unknown at this point, hence None).

    Args:
    ----
        chain_serial: Serial of the chain which received the message.
        msg: Message to parse.
    """
    self.debug_log("Received Inventory Response: %s for %s", "".join(HEX_FORMAT % b for b in msg), chain_serial)

    # msg[0] is the INV command byte; addresses follow until EOM
    index = 1
    self.gen2_addr_arr[chain_serial] = {}
    while msg[index] != ord(OppRs232Intf.EOM_CMD):
        if (msg[index] & ord(OppRs232Intf.CARD_ID_TYPE_MASK)) == ord(OppRs232Intf.CARD_ID_GEN2_CARD):
            self.num_gen2_brd += 1
            # firmware version filled in later by vers_resp
            self.gen2_addr_arr[chain_serial][msg[index]] = None
        else:
            self.log.warning("Invalid inventory response %s for %s.", msg[index], chain_serial)
        index += 1
    self.debug_log("Found %d Gen2 OPP boards on %s.", self.num_gen2_brd, chain_serial)
# pylint: disable-msg=too-many-statements
@staticmethod
def eom_resp(chain_serial, msg):
    """Process an EOM.

    Intentionally a no-op: nothing needs servicing on an end-of-message.

    Args:
    ----
        chain_serial: Serial of the chain which received the message.
        msg: Message to parse.
    """
    # An EOM command can be used to resynchronize communications if message synch is lost
def _parse_gen2_board(self, chain_serial, msg, read_input_msg):
    """Create card objects for one Gen2 board from its config record.

    msg[0] is the board address; msg[2..] describe the board's wings.
    Accumulates solenoid/input/incand bit masks over the wings, registers
    the matching card objects, and appends the commands needed to poll
    this board's inputs to *read_input_msg* (mutated in place).
    """
    has_neo = False
    has_sw_matrix = False
    has_lamp_matrix = False
    wing_index = 0
    sol_mask = 0
    inp_mask = 0
    incand_mask = 0
    # Each wing contributes 4 solenoid bits and/or 8 input/incand bits
    while wing_index < OppRs232Intf.NUM_G2_WING_PER_BRD:
        if msg[2 + wing_index] == ord(OppRs232Intf.WING_SOL):
            # solenoid wing: low nibble of drivers and of inputs
            sol_mask |= (0x0f << (4 * wing_index))
            inp_mask |= (0x0f << (8 * wing_index))
        elif msg[2 + wing_index] == ord(OppRs232Intf.WING_INP):
            inp_mask |= (0xff << (8 * wing_index))
        elif msg[2 + wing_index] == ord(OppRs232Intf.WING_INCAND):
            incand_mask |= (0xff << (8 * wing_index))
        elif msg[2 + wing_index] in (ord(OppRs232Intf.WING_SW_MATRIX_OUT),
                                     ord(OppRs232Intf.WING_SW_MATRIX_OUT_LOW_WING)):
            has_sw_matrix = True
        elif msg[2 + wing_index] == ord(OppRs232Intf.WING_NEO):
            has_neo = True
            # 0xef: all input bits except bit 4
            inp_mask |= (0xef << (8 * wing_index))
        elif msg[2 + wing_index] == ord(OppRs232Intf.WING_HI_SIDE_INCAND):
            incand_mask |= (0xff << (8 * wing_index))
        elif msg[2 + wing_index] == ord(OppRs232Intf.WING_NEO_SOL):
            # combined neopixel + solenoid wing
            inp_mask |= (0x0e << (8 * wing_index))
            sol_mask |= (0x0f << (4 * wing_index))
            has_neo = True
        elif msg[2 + wing_index] in (ord(OppRs232Intf.WING_LAMP_MATRIX_COL_WING),
                                     ord(OppRs232Intf.WING_LAMP_MATRIX_ROW_WING)):
            has_lamp_matrix = True
        wing_index += 1

    if incand_mask != 0:
        card = OPPIncandCard(chain_serial, msg[0], incand_mask, self.machine)
        self.opp_incands["{}-{}".format(chain_serial, card.card_num)] = card
    if sol_mask != 0:
        self.opp_solenoid.append(
            OPPSolenoidCard(chain_serial, msg[0], sol_mask, self.sol_dict, self))
    if inp_mask != 0:
        # Create the input object, and add to the command to read all inputs
        self.opp_inputs.append(OPPInputCard(chain_serial, msg[0], inp_mask, self.inp_dict,
                                            self.inp_addr_dict, self))

        # Add command to read all inputs to read input message
        # (4 zero data bytes + CRC; see read_gen2_inp_resp for the reply)
        inp_msg = bytearray()
        inp_msg.append(msg[0])
        inp_msg.extend(OppRs232Intf.READ_GEN2_INP_CMD)
        inp_msg.append(0)
        inp_msg.append(0)
        inp_msg.append(0)
        inp_msg.append(0)
        inp_msg.extend(OppRs232Intf.calc_crc8_whole_msg(inp_msg))
        read_input_msg.extend(inp_msg)

    if has_sw_matrix:
        # Create the matrix object, and add to the command to read all matrix inputs
        self.opp_inputs.append(OPPMatrixCard(chain_serial, msg[0], self.inp_dict,
                                             self.matrix_inp_addr_dict, self))

        # Add command to read all matrix inputs to read input message
        # (8 zero data bytes for the 64-bit matrix state + CRC)
        inp_msg = bytearray()
        inp_msg.append(msg[0])
        inp_msg.extend(OppRs232Intf.READ_MATRIX_INP)
        inp_msg.append(0)
        inp_msg.append(0)
        inp_msg.append(0)
        inp_msg.append(0)
        inp_msg.append(0)
        inp_msg.append(0)
        inp_msg.append(0)
        inp_msg.append(0)
        inp_msg.extend(OppRs232Intf.calc_crc8_whole_msg(inp_msg))
        read_input_msg.extend(inp_msg)
    if has_neo:
        card = OPPNeopixelCard(chain_serial, msg[0], self)
        self.neo_card_dict[chain_serial + '-' + card.card_num] = card
    if has_lamp_matrix:
        card = OPPModernMatrixLightsCard(chain_serial, msg[0], self)
        self.matrix_light_cards[chain_serial + '-' + card.card_num] = card
def _bad_crc(self, chain_serial, msg):
    """Log a warning for a message that failed its CRC8 check and count it.

    Fix: the log format string was missing a space between the chain
    serial and "Msg" ("Chain: %sMsg ...").
    """
    self.bad_crc[chain_serial] += 1
    self.log.warning("Chain: %s Msg contains bad CRC: %s.", chain_serial, "".join(HEX_FORMAT % b for b in msg))
def get_gen2_cfg_resp(self, chain_serial, msg):
    """Process cfg response.

    Walks the concatenated 7-byte config records (addr, cmd, 4 wing
    bytes, CRC8), creating cards via _parse_gen2_board, then stores the
    assembled poll message for this chain and arms its poll event.

    Args:
    ----
        chain_serial: Serial of the chain which received the message.
        msg: Message to parse.
    """
    # Multiple get gen2 cfg responses can be received at once
    self.debug_log("Received Gen2 Cfg Response:%s", "".join(HEX_FORMAT % b for b in msg))
    curr_index = 0
    read_input_msg = bytearray()
    while True:
        # check that message is long enough, must include crc8
        if len(msg) < curr_index + 7:
            self.log.warning("Msg is too short: %s.", "".join(HEX_FORMAT % b for b in msg))
            self.opp_connection[chain_serial].lost_synch()
            break

        # Verify the CRC8 is correct
        crc8 = OppRs232Intf.calc_crc8_part_msg(msg, curr_index, 6)
        if msg[curr_index + 6] != ord(crc8):
            self._bad_crc(chain_serial, msg)
            break
        self._parse_gen2_board(chain_serial, msg[curr_index:curr_index + 6], read_input_msg)

        # either the chain ends here or another config record follows
        if (len(msg) > curr_index + 7) and (msg[curr_index + 7] == ord(OppRs232Intf.EOM_CMD)):
            break
        if (len(msg) > curr_index + 8) and (msg[curr_index + 8] == ord(OppRs232Intf.GET_GEN2_CFG)):
            curr_index += 7
        else:
            self.log.warning("Malformed GET_GEN2_CFG response:%s.",
                             "".join(HEX_FORMAT % b for b in msg))
            self.opp_connection[chain_serial].lost_synch()
            break

    read_input_msg.extend(OppRs232Intf.EOM_CMD)
    self.read_input_msg[chain_serial] = bytes(read_input_msg)
    self._poll_response_received[chain_serial] = asyncio.Event()
    # set so the first poll cycle is allowed to run immediately
    self._poll_response_received[chain_serial].set()
def vers_resp(self, chain_serial, msg):
    """Process version response.

    Walks the concatenated 7-byte version records (addr, cmd, 4 version
    bytes, CRC8), stores each board's firmware version and tracks the
    chain's minimum version.

    Args:
    ----
        chain_serial: Serial of the chain which received the message.
        msg: Message to parse.
    """
    # Multiple get version responses can be received at once
    self.debug_log("Received Version Response (Chain: %s): %s", chain_serial, "".join(HEX_FORMAT % b for b in msg))
    curr_index = 0
    while True:
        # check that message is long enough, must include crc8
        if len(msg) < curr_index + 7:
            self.log.warning("Msg is too short (Chain: %s): %s.", chain_serial,
                             "".join(HEX_FORMAT % b for b in msg))
            self.opp_connection[chain_serial].lost_synch()
            break

        # Verify the CRC8 is correct
        crc8 = OppRs232Intf.calc_crc8_part_msg(msg, curr_index, 6)
        if msg[curr_index + 6] != ord(crc8):
            self._bad_crc(chain_serial, msg)
            break
        # version packed as 4 bytes, big-endian: major.minor.patch.build
        version = (msg[curr_index + 2] << 24) | \
            (msg[curr_index + 3] << 16) | \
            (msg[curr_index + 4] << 8) | \
            msg[curr_index + 5]
        self.debug_log("Firmware version of board 0x%02x (Chain: %s): %d.%d.%d.%d", msg[curr_index], chain_serial,
                       msg[curr_index + 2], msg[curr_index + 3], msg[curr_index + 4], msg[curr_index + 5])
        if msg[curr_index] not in self.gen2_addr_arr[chain_serial]:
            self.log.warning("Got firmware response for %s but not in inventory at %s", msg[curr_index],
                             chain_serial)
        else:
            self.gen2_addr_arr[chain_serial][msg[curr_index]] = version
            # remember the oldest firmware on this chain
            if version < self.min_version[chain_serial]:
                self.min_version[chain_serial] = version
            if version == BAD_FW_VERSION:
                raise AssertionError("Original firmware sent only to Brian before adding "
                                     "real version numbers. The firmware must be updated before "
                                     "MPF will work.")

        # either the chain ends here or another version record follows
        if (len(msg) > curr_index + 7) and (msg[curr_index + 7] == ord(OppRs232Intf.EOM_CMD)):
            break
        if (len(msg) > curr_index + 8) and (msg[curr_index + 8] == ord(OppRs232Intf.GET_VERS_CMD)):
            curr_index += 7
        else:
            self.log.warning("Malformed GET_VERS_CMD response (Chain %s): %s.", chain_serial,
                             "".join(HEX_FORMAT % b for b in msg))
            self.opp_connection[chain_serial].lost_synch()
            break
def read_gen2_inp_resp_initial(self, chain_serial, msg):
    """Read initial switch states.

    Stores the 32-bit input snapshot as the card's old_state without
    generating any switch events (used only during discovery).

    Args:
    ----
        chain_serial: Serial of the chain which received the message.
        msg: Message to parse.
    """
    # Verify the CRC8 is correct
    if len(msg) < 7:
        raise AssertionError("Received too short initial input response: " + "".join(HEX_FORMAT % b for b in msg))
    crc8 = OppRs232Intf.calc_crc8_part_msg(msg, 0, 6)
    if msg[6] != ord(crc8):
        self._bad_crc(chain_serial, msg)
    else:
        if chain_serial + '-' + str(msg[0]) not in self.inp_addr_dict:
            self.log.warning("Got input response for invalid card at initial request: %s. Msg: %s.", msg[0],
                             "".join(HEX_FORMAT % b for b in msg))
            return

        opp_inp = self.inp_addr_dict[chain_serial + '-' + str(msg[0])]
        # msg[2..5]: 32-bit input state, big-endian
        new_state = (msg[2] << 24) | \
            (msg[3] << 16) | \
            (msg[4] << 8) | \
            msg[5]

        opp_inp.old_state = new_state
def read_gen2_inp_resp(self, chain_serial, msg):
    """Read switch changes.

    Compares the new 32-bit input snapshot against old_state and posts a
    switch event for every changed bit (active low on the wire, so a
    cleared bit means state=1), then releases the poll task.

    Args:
    ----
        chain_serial: Serial of the chain which received the message.
        msg: Message to parse.
    """
    # Single read gen2 input response.  Receive function breaks them down

    # Verify the CRC8 is correct
    if len(msg) < 7:
        self.log.warning("Msg too short: %s.", "".join(HEX_FORMAT % b for b in msg))
        self.opp_connection[chain_serial].lost_synch()
        return

    crc8 = OppRs232Intf.calc_crc8_part_msg(msg, 0, 6)
    if msg[6] != ord(crc8):
        self._bad_crc(chain_serial, msg)
    else:
        if chain_serial + '-' + str(msg[0]) not in self.inp_addr_dict:
            self.log.warning("Got input response for invalid card: %s. Msg: %s.", msg[0],
                             "".join(HEX_FORMAT % b for b in msg))
            return

        opp_inp = self.inp_addr_dict[chain_serial + '-' + str(msg[0])]
        # msg[2..5]: 32-bit input state, big-endian
        new_state = (msg[2] << 24) | \
            (msg[3] << 16) | \
            (msg[4] << 8) | \
            msg[5]

        # Update the state which holds inputs that are active
        changes = opp_inp.old_state ^ new_state
        if changes != 0:
            curr_bit = 1
            for index in range(0, 32):
                if (curr_bit & changes) != 0:
                    if (curr_bit & new_state) == 0:
                        # bit cleared -> switch active
                        self.machine.switch_controller.process_switch_by_num(
                            state=1,
                            num=opp_inp.chain_serial + '-' + opp_inp.card_num + '-' + str(index),
                            platform=self)
                    else:
                        self.machine.switch_controller.process_switch_by_num(
                            state=0,
                            num=opp_inp.chain_serial + '-' + opp_inp.card_num + '-' + str(index),
                            platform=self)
                curr_bit <<= 1
        opp_inp.old_state = new_state

    # we can continue to poll
    self._poll_response_received[chain_serial].set()
def read_matrix_inp_resp_initial(self, chain_serial, msg):
    """Read initial matrix switch states.

    Stores the 64-bit matrix snapshot as the card's old_state without
    generating switch events (used only during discovery).

    Args:
    ----
        chain_serial: Serial of the chain which received the message.
        msg: Message to parse.
    """
    # Verify the CRC8 is correct
    if len(msg) < 11:
        raise AssertionError("Received too short initial input response: " + "".join(HEX_FORMAT % b for b in msg))
    crc8 = OppRs232Intf.calc_crc8_part_msg(msg, 0, 10)
    if msg[10] != ord(crc8):
        self._bad_crc(chain_serial, msg)
    else:
        if chain_serial + '-' + str(msg[0]) not in self.matrix_inp_addr_dict:
            self.log.warning("Got input response for invalid matrix card at initial request: %s. Msg: %s.", msg[0],
                             "".join(HEX_FORMAT % b for b in msg))
            return

        opp_inp = self.matrix_inp_addr_dict[chain_serial + '-' + str(msg[0])]
        # msg[2..9]: 64-bit matrix state, big-endian
        opp_inp.old_state = ((msg[2] << 56) | (msg[3] << 48) | (msg[4] << 40) | (msg[5] << 32) |
                             (msg[6] << 24) | (msg[7] << 16) | (msg[8] << 8) | msg[9])
# pylint: disable-msg=too-many-nested-blocks
def read_matrix_inp_resp(self, chain_serial, msg):
    """Read matrix switch changes.

    Compares the new 64-bit matrix snapshot against old_state and posts
    a switch event for every changed bit. Matrix switch numbers are
    offset by 32 (they follow the 32 direct inputs), then releases the
    poll task.

    Args:
    ----
        chain_serial: Serial of the chain which received the message.
        msg: Message to parse.
    """
    # Single read gen2 input response.  Receive function breaks them down

    # Verify the CRC8 is correct
    if len(msg) < 11:
        self.log.warning("Msg too short: %s.", "".join(HEX_FORMAT % b for b in msg))
        self.opp_connection[chain_serial].lost_synch()
        return

    crc8 = OppRs232Intf.calc_crc8_part_msg(msg, 0, 10)
    if msg[10] != ord(crc8):
        self._bad_crc(chain_serial, msg)
    else:
        if chain_serial + '-' + str(msg[0]) not in self.matrix_inp_addr_dict:
            self.log.warning("Got input response for invalid matrix card: %s. Msg: %s.", msg[0],
                             "".join(HEX_FORMAT % b for b in msg))
            return

        opp_inp = self.matrix_inp_addr_dict[chain_serial + '-' + str(msg[0])]
        # msg[2..9]: 64-bit matrix state, big-endian
        new_state = ((msg[2] << 56) | (msg[3] << 48) | (msg[4] << 40) | (msg[5] << 32) |
                     (msg[6] << 24) | (msg[7] << 16) | (msg[8] << 8) | msg[9])

        changes = opp_inp.old_state ^ new_state
        if changes != 0:
            curr_bit = 1
            # bits 0..63 map to switch numbers 32..95
            for index in range(32, 96):
                if (curr_bit & changes) != 0:
                    if (curr_bit & new_state) == 0:
                        # bit cleared -> switch active
                        self.machine.switch_controller.process_switch_by_num(
                            state=1,
                            num=opp_inp.chain_serial + '-' + opp_inp.card_num + '-' + str(index),
                            platform=self)
                    else:
                        self.machine.switch_controller.process_switch_by_num(
                            state=0,
                            num=opp_inp.chain_serial + '-' + opp_inp.card_num + '-' + str(index),
                            platform=self)
                curr_bit <<= 1
        opp_inp.old_state = new_state

    # we can continue to poll
    self._poll_response_received[chain_serial].set()
def _get_dict_index(self, input_str):
    """Normalize a user-configured number to "chain-card-number" form.

    Accepts "chain-card-number", "card-number" (single-chain setups only)
    or a bare number (card defaults to '0'). Raises a config error for
    non-strings, ambiguous chains or unknown chains.
    """
    if not isinstance(input_str, str):
        self.raise_config_error("Invalid number format for OPP. Number should be card-number or chain-card-number "
                                "(e.g. 0-1)", 2)

    try:
        chain_str, card_str, number_str = input_str.split("-")
    except ValueError:
        # fewer than three parts: infer the chain if it is unambiguous
        if len(self.serial_connections) > 1:
            self.raise_config_error("You need to specify a chain as chain-card-number in: {}".format(input_str), 17)
        else:
            chain_str = list(self.serial_connections)[0].chain_serial
            try:
                card_str, number_str = input_str.split("-")
            except ValueError:
                # bare number: assume card 0
                card_str = '0'
                number_str = input_str

    if chain_str not in self.opp_connection:
        self.raise_config_error("Chain {} does not exist. Existing chains: {}".format(
            chain_str, list(self.opp_connection.keys())), 3)

    return chain_str + "-" + card_str + "-" + number_str
def configure_driver(self, config: DriverConfig, number: str, platform_settings: dict):
    """Configure a driver.

    Looks up the solenoid discovered for *number*, attaches the MPF
    config to it and pushes the default pulse/hold settings to the card.

    Args:
    ----
        config: Config dict.
        number: Number of this driver.
        platform_settings: Platform specific settings.
    """
    if not self.opp_connection:
        self.raise_config_error("A request was made to configure an OPP solenoid, "
                                "but no OPP connection is available", 4)

    number = self._get_dict_index(number)

    if number not in self.sol_dict:
        self.raise_config_error("A request was made to configure an OPP solenoid "
                                "with number {} which doesn't exist".format(number), 5)

    # Use new update individual solenoid command
    opp_sol = self.sol_dict[number]
    opp_sol.config = config
    opp_sol.platform_settings = platform_settings
    if self.debug:
        self.debug_log("Configure driver %s", number)
    default_pulse = PulseSettings(config.default_pulse_power, config.default_pulse_ms)
    default_hold = HoldSettings(config.default_hold_power)
    opp_sol.reconfigure_driver(default_pulse, default_hold)

    # Removing the default input is not necessary since the
    # CFG_SOL_USE_SWITCH is not being set
    return opp_sol
def configure_switch(self, number: str, config: SwitchConfig, platform_config: dict):
    """Configure a switch.

    Returns the input object discovered for *number*; config and
    platform_config are unused because the hardware needs no per-switch
    setup.

    Args:
    ----
        number: Number of this switch.
        config: Config dict.
        platform_config: Platform specific settings.
    """
    del platform_config
    del config
    # A switch is termed as an input to OPP
    if not self.opp_connection:
        self.raise_config_error("A request was made to configure an OPP switch, "
                                "but no OPP connection is available", 6)

    number = self._get_dict_index(number)

    if number not in self.inp_dict:
        self.raise_config_error("A request was made to configure an OPP switch "
                                "with number {} which doesn't exist".format(number), 7)

    return self.inp_dict[number]
def parse_light_number_to_channels(self, number: str, subtype: str):
    """Parse number and subtype to channel."""
    if subtype in ("matrix", "incand"):
        # Matrix and incandescent lights occupy a single channel.
        return [{"number": self._get_dict_index(number)}]
    if not subtype or subtype == "led":
        # RGB LEDs occupy three consecutive channels on the chain.
        chain_serial, card, index = self._get_dict_index(number).split('-')
        first_channel = int(index) * 3
        return [
            {"number": "{}-{}-{}".format(chain_serial, card, first_channel + offset)}
            for offset in range(3)
        ]
    self.raise_config_error("Unknown subtype {}".format(subtype), 8)
    return []
def configure_light(self, number, subtype, config, platform_settings):
    """Configure a led or matrix light.

    Dispatches on ``subtype``:

    * ``None`` / ``"led"`` -> neopixel channel on a neo card
    * ``"matrix"`` with chain firmware >= 0x02010000 -> modern matrix light
    * ``"matrix"`` (older firmware) or ``"incand"`` -> incandescent light,
      using hardware fade when the firmware supports it, software fade otherwise
    """
    del config
    if not self.opp_connection:
        self.raise_config_error("A request was made to configure an OPP light, "
                                "but no OPP connection is available", 9)
    chain_serial, card, light_num = number.split('-')
    # Per-card dictionaries are keyed by "<chain>-<card>".
    index = chain_serial + '-' + card
    if not subtype or subtype == "led":
        if index not in self.neo_card_dict:
            self.raise_config_error("A request was made to configure an OPP neopixel "
                                    "with card number {} which doesn't exist".format(card), 10)
        if not self.neo_card_dict[index].is_valid_light_number(light_num):
            self.raise_config_error("A request was made to configure an OPP neopixel "
                                    "with card number {} but number '{}' is "
                                    "invalid".format(card, light_num), 22)
        light = OPPModernLightChannel(chain_serial, int(card), int(light_num), self._light_system)
        self._light_system.mark_dirty(light)
        return light
    if subtype == "matrix" and self.min_version[chain_serial] >= 0x02010000:
        # modern matrix lights
        if index not in self.matrix_light_cards:
            self.raise_config_error("A request was made to configure an OPP matrix light "
                                    "with card number {} which doesn't exist".format(card), 18)
        if not self.matrix_light_cards[index].is_valid_light_number(light_num):
            self.raise_config_error("A request was made to configure an OPP matrix light "
                                    "with card number {} but number '{}' is "
                                    "invalid".format(card, light_num), 19)
        # Matrix channels are addressed 0x2000 above the neopixel channel space.
        light = OPPModernLightChannel(chain_serial, int(card), int(light_num) + 0x2000, self._light_system)
        self._light_system.mark_dirty(light)
        return light
    if subtype in ("incand", "matrix"):
        # "matrix" only reaches here when the chain firmware predates
        # modern matrix support (checked above).
        if index not in self.opp_incands:
            self.raise_config_error("A request was made to configure an OPP incand light "
                                    "with card number {} which doesn't exist".format(card), 20)
        if not self.opp_incands[index].is_valid_light_number(light_num):
            self.raise_config_error("A request was made to configure an OPP incand light "
                                    "with card number {} but number '{}' is "
                                    "invalid".format(card, light_num), 21)
        if self.min_version[chain_serial] >= 0x02010000:
            light = self.opp_incands[index].configure_modern_fade_incand(light_num, self._light_system)
            self._light_system.mark_dirty(light)
            return light
        # legacy incands with new or old subtype
        return self.opp_incands[index].configure_software_fade_incand(light_num)
    self.raise_config_error("Unknown subtype {}".format(subtype), 12)
    return None
async def _poll_sender(self, chain_serial):
    """Poll switches on one chain forever.

    Repeatedly sends the cached read-input message for ``chain_serial``,
    pacing itself on the poll-response event and a fixed sleep so the
    serial link is not saturated.
    """
    if len(self.read_input_msg[chain_serial]) <= 1:
        # there is no point in polling without switches
        return
    while True:
        # wait for previous poll response
        # Allow 25 poll periods before complaining that the response is late.
        timeout = 1 / self.config['poll_hz'] * 25
        try:
            await asyncio.wait_for(self._poll_response_received[chain_serial].wait(), timeout)
        except asyncio.TimeoutError:
            self.log.warning("Poll took more than %sms for %s", timeout * 1000, chain_serial)
        else:
            # Re-arm the event so the next iteration waits for a fresh response.
            self._poll_response_received[chain_serial].clear()
        # send poll
        self.send_to_processor(chain_serial, self.read_input_msg[chain_serial])
        await self.opp_connection[chain_serial].writer.drain()
        # the line above saturates the link and seems to overwhelm the hardware. limit it to 100Hz
        await asyncio.sleep(1 / self.config['poll_hz'])
def _verify_coil_and_switch_fit(self, switch, coil):
chain_serial, card, solenoid = coil.hw_driver.number.split('-')
sw_chain_serial, sw_card, sw_num = switch.hw_switch.number.split('-')
if self.min_version[chain_serial] >= 0x20000:
if chain_serial != sw_chain_serial or card != sw_card:
self.raise_config_error('Invalid switch being configured for driver. Driver = {} '
'Switch = {}. Driver and switch have to be on the same '
'board.'.format(coil.hw_driver.number, switch.hw_switch.number), 13)
else:
matching_sw = ((int(solenoid) & 0x0c) << 1) | (int(solenoid) & 0x03)
if chain_serial != sw_chain_serial or card != sw_card or matching_sw != int(sw_num):
self.raise_config_error('Invalid switch being configured for driver. Driver = {} '
'Switch = {}. For Firmware < 0.2.0 they have to be on the same board and '
'have the same number'.format(coil.hw_driver.number, switch.hw_switch.number),
14)
def set_pulse_on_hit_rule(self, enable_switch: SwitchSettings, coil: DriverSettings):
    """Pulse a driver when a switch is hit; the pulse runs to completion.

    Releasing the switch does not cancel the pulse. Typically used for
    autofire coils such as pop bumpers.
    """
    self._write_hw_rule(enable_switch, coil, False, False)
def set_delayed_pulse_on_hit_rule(self, enable_switch: SwitchSettings, coil: DriverSettings, delay_ms: int):
    """Pulse a driver a fixed delay after a switch hit.

    The pulse continues even if the switch is released. Typically used for
    kickbacks. The hardware only supports delays of 1..255 ms.
    """
    if delay_ms <= 0:
        raise AssertionError("set_delayed_pulse_on_hit_rule should be used with a positive delay "
                             "not {}".format(delay_ms))
    if delay_ms > 255:
        raise AssertionError("set_delayed_pulse_on_hit_rule is limited to max 255ms "
                             "(was {})".format(delay_ms))
    self._write_hw_rule(enable_switch, coil, False, False, delay_ms=int(delay_ms))
def set_pulse_on_hit_and_release_rule(self, enable_switch: SwitchSettings, coil: DriverSettings):
    """Pulse a driver on switch hit; cancel the pulse on release.

    Typically used on the main coil for dual-coil flippers without an EOS
    switch.
    """
    self._write_hw_rule(enable_switch, coil, False, True)
def set_pulse_on_hit_and_enable_and_release_rule(self, enable_switch: SwitchSettings, coil: DriverSettings):
    """Pulse then hold a driver on switch hit; release disables it.

    After the pulse the driver stays enabled (possibly with pwm) until the
    switch is released. Typically used for single-coil flippers.
    """
    self._write_hw_rule(enable_switch, coil, True, True)
def set_pulse_on_hit_and_release_and_disable_rule(self, enable_switch: SwitchSettings,
                                                  eos_switch: SwitchSettings, coil: DriverSettings,
                                                  repulse_settings: Optional[RepulseSettings]):
    """Rule for dual-wound flipper main coils with an EOS switch.

    OPP hardware cannot express this rule type, so configuring it always
    fails loudly rather than silently misbehaving.
    """
    raise AssertionError("Not implemented in OPP currently")
def set_pulse_on_hit_and_enable_and_release_and_disable_rule(self, enable_switch: SwitchSettings,
                                                             eos_switch: SwitchSettings, coil: DriverSettings,
                                                             repulse_settings: Optional[RepulseSettings]):
    """Rule for single-wound flipper coils with an EOS switch.

    OPP hardware cannot express this rule type, so configuring it always
    fails loudly rather than silently misbehaving.
    """
    raise AssertionError("Not implemented in OPP currently")
# pylint: disable-msg=too-many-arguments
def _write_hw_rule(self, switch_obj: SwitchSettings, driver_obj: DriverSettings, use_hold, can_cancel,
delay_ms=None):
if switch_obj.invert:
raise AssertionError("Cannot handle inverted switches")
if driver_obj.hold_settings and not use_hold:
raise AssertionError("Invalid call")
self._verify_coil_and_switch_fit(switch_obj, driver_obj)
self.debug_log("Setting HW Rule. Driver: %s", driver_obj.hw_driver.number)
driver_obj.hw_driver.switches.append(switch_obj.hw_switch.number)
driver_obj.hw_driver.set_switch_rule(driver_obj.pulse_settings, driver_obj.hold_settings, driver_obj.recycle,
can_cancel, delay_ms)
_, _, switch_num = switch_obj.hw_switch.number.split("-")
switch_num = int(switch_num)
self._add_switch_coil_mapping(switch_num, driver_obj.hw_driver)
def _remove_switch_coil_mapping(self, switch_num, driver: "OPPSolenoid"):
    """Remove mapping between switch and coil."""
    if self.min_version[driver.sol_card.chain_serial] < 0x20000:
        # Older firmware has no per-input solenoid mapping command.
        return
    _, _, coil_num = driver.number.split('-')
    # mirror switch matrix columns to handle the fact that OPP matrix is in reverse column order
    if switch_num >= 32:
        switch_num = 8 * (15 - (switch_num // 8)) + switch_num % 8
    cmd = bytearray()
    cmd.append(driver.sol_card.addr)
    cmd.extend(OppRs232Intf.SET_SOL_INP_CMD)
    cmd.append(int(switch_num))
    # The "remove" request is encoded by offsetting the coil number.
    cmd.append(int(coil_num) + ord(OppRs232Intf.CFG_SOL_INP_REMOVE))
    cmd.extend(OppRs232Intf.calc_crc8_whole_msg(cmd))
    cmd.extend(OppRs232Intf.EOM_CMD)
    if self.debug:
        self.debug_log("Unmapping input %s and coil %s on %s", switch_num, coil_num, driver.sol_card.chain_serial)
    self.send_to_processor(driver.sol_card.chain_serial, bytes(cmd))
def _add_switch_coil_mapping(self, switch_num, driver: "OPPSolenoid"):
    """Add mapping between switch and coil."""
    if self.min_version[driver.sol_card.chain_serial] < 0x20000:
        # Older firmware has no per-input solenoid mapping command.
        return
    _, _, coil_num = driver.number.split('-')
    # mirror switch matrix columns to handle the fact that OPP matrix is in reverse column order
    if switch_num >= 32:
        switch_num = 8 * (15 - (switch_num // 8)) + switch_num % 8
    cmd = bytearray()
    cmd.append(driver.sol_card.addr)
    cmd.extend(OppRs232Intf.SET_SOL_INP_CMD)
    cmd.append(int(switch_num))
    cmd.append(int(coil_num))
    cmd.extend(OppRs232Intf.calc_crc8_whole_msg(cmd))
    cmd.extend(OppRs232Intf.EOM_CMD)
    if self.debug:
        self.debug_log("Mapping input %s and coil %s on %s", switch_num, coil_num, driver.sol_card.chain_serial)
    self.send_to_processor(driver.sol_card.chain_serial, bytes(cmd))
def clear_hw_rule(self, switch: SwitchSettings, coil: DriverSettings):
    """Clear a hardware rule.

    This is used if you want to remove the linkage between a switch and
    some driver activity. For example, if you wanted to disable your
    flippers (so that a player pushing the flipper buttons wouldn't cause
    the flippers to flip), you'd call this method with your flipper button
    as the *sw_num*.
    """
    hw_driver = coil.hw_driver
    sw_number = switch.hw_switch.number
    if sw_number not in hw_driver.switches:
        return
    if self.debug:
        self.debug_log("Clearing HW Rule for switch: %s, coils: %s", sw_number,
                       hw_driver.number)
    hw_driver.switches.remove(sw_number)
    _, _, raw_input_num = sw_number.split("-")
    self._remove_switch_coil_mapping(int(raw_input_num), hw_driver)
    # Disable the rule entirely once no switch drives this coil any more.
    # Technically only needed when solenoid parameters change, but MPF may
    # not know when initial kick and hold values changed, so do it each time.
    if not hw_driver.switches:
        hw_driver.remove_switch_rule()
|
from django.db import models
from constituencies.models import Constituency
from uk_political_parties.models import Party
from elections.models import Election
class Person(models.Model):
    """A person standing in elections, optionally imported from a remote source."""

    name = models.CharField(blank=False, max_length=255)
    # Identifier of this person in the external source system (if imported).
    remote_id = models.CharField(blank=True, max_length=255, null=True)
    source_url = models.URLField(blank=True, null=True)
    source_name = models.CharField(blank=True, max_length=100)
    image_url = models.URLField(blank=True, null=True)
    elections = models.ManyToManyField(Election)
    parties = models.ManyToManyField(Party, through='PartyMemberships')
    constituencies = models.ManyToManyField(Constituency, through='PersonConstituencies')

    @property
    def current_party(self):
        # A membership with no end date is considered current. Returns the
        # first such PartyMemberships row, or None when there is none.
        parties = self.partymemberships_set.filter(membership_end=None)
        if parties:
            return parties[0]

    @property
    def current_election(self):
        # First election flagged active.
        # NOTE(review): raises IndexError when no election is active —
        # confirm callers guarantee at least one.
        return self.elections.filter(active=True)[0]

    @property
    def current_constituency(self):
        # Constituency attached to this person for the currently active election.
        # NOTE(review): also raises IndexError when there is no match.
        return self.constituencies.filter(
            personconstituencies__election=self.current_election)[0]

    def __unicode__(self):
        # Python 2-style display string, e.g. "Jane Doe (abc123)".
        return "%s (%s)" % (self.name, self.remote_id)
class PartyMemberships(models.Model):
    """Through-model linking a Person to a Party over a date range."""

    person = models.ForeignKey(Person)
    party = models.ForeignKey(Party)
    membership_start = models.DateField()
    # NULL membership_end means the membership is still current.
    membership_end = models.DateField(null=True)
class PersonConstituencies(models.Model):
    """Through-model linking a Person to a Constituency for one Election."""

    person = models.ForeignKey(Person)
    constituency = models.ForeignKey(Constituency)
    election = models.ForeignKey(Election)
|
import json
from app import models
from django.test import Client, TestCase
from django.contrib.auth.hashers import make_password
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
class TestLecturerWeb(TestCase):
    """Integration tests for the lecturer-facing web views."""

    def _init_test_lecturer(self):
        # Create (or fetch) the test lecturer user and Lecturer profile.
        # NOTE(review): the hasattr guard only short-circuits within one
        # test-case instance; get_or_create below already makes repeated
        # calls safe across tests.
        if hasattr(self, '_lecturer'):
            return
        self.lecturer = "lecturer_oUP1zwTO9"
        self.lecturer_pswd = "123"
        user_data = {
            'password': make_password(self.lecturer_pswd),
            'is_staff': False,
            'is_superuser': False,
        }
        user, _ = User.objects.get_or_create(username=self.lecturer,
                                             defaults=user_data)
        _lecturer, _ = models.Lecturer.objects.get_or_create(
            user=user,
            defaults={
                "subject": models.Subject.get_english(),
                "name": "kaoru"
            })
        self._lecturer = _lecturer

    def setUp(self):
        # Every test starts with a client logged in as the test lecturer.
        self.client = Client()
        self._init_test_lecturer()
        self.client.login(username=self.lecturer, password=self.lecturer_pswd)

    def tearDown(self):
        pass

    def test_home(self):
        # Home view redirects (302) for a logged-in lecturer.
        response = self.client.get(reverse('lecturer:home'))
        self.assertEqual(302, response.status_code)

    def test_index(self):
        response = self.client.get(reverse('lecturer:index'))
        self.assertEqual(200, response.status_code)

    def test_login(self):
        # Fresh, unauthenticated client: login page must render.
        client = Client()
        response = client.get(reverse('lecturer:login'))
        self.assertEqual(200, response.status_code)

    def test_login_auth(self):
        # Posting valid credentials redirects away from the login page.
        client = Client()
        data = {'username': self.lecturer, 'password': self.lecturer_pswd}
        response = client.post(reverse('lecturer:login'), data=data)
        self.assertEqual(302, response.status_code)

    def test_logout(self):
        client = Client()
        client.login(username=self.lecturer, password=self.lecturer_pswd)
        response = client.get(reverse('lecturer:logout'))
        self.assertEqual(302, response.status_code)

    def test_timeslots(self):
        response = self.client.get(reverse('lecturer:timeslots'))
        self.assertEqual(200, response.status_code)

    def test_living(self):
        response = self.client.get(reverse('lecturer:living'))
        self.assertEqual(200, response.status_code)

    def test_timeslot_questions(self):
        response = self.client.get(
            reverse('lecturer:timeslot-questions', kwargs={'tsid': 1}))
        self.assertEqual(200, response.status_code)
        # update test
        # Posting against a nonexistent timeslot id (0) must 404.
        response = self.client.post(
            reverse('lecturer:timeslot-questions', kwargs={'tsid': 0}),
            data={'gids': ''}
        )
        self.assertEqual(404, response.status_code)
        # TODO: create test LiveCourse

    def test_exercise_store(self):
        response = self.client.get(reverse('lecturer:exercise-store'))
        self.assertEqual(200, response.status_code)
        # Store one exercise group; the payload is a JSON string in a form field.
        data = {
            "group": '{"exercises":[{"analyse":"题目解析","solution":"选项1","id":"","title":"题目","options":[{"text":"选项1","id":""},{"text":"选项2","id":""},{"text":"选项3","id":""},{"text":"选项4","id":""}]}],"desc":"题组描述","id":"","title":"题组名称"}'}
        response = self.client.post(reverse('lecturer:exercise-store'), data)
        self.assertEqual(200, response.status_code)

    def test_api_exercise_store(self):
        # Exercise-store API: default action, group listing, and single group.
        url = reverse('lecturer:api-exercise-store')
        response = self.client.get(url)
        self.assertEqual(200, response.status_code)
        url = reverse('lecturer:api-exercise-store') + '?action=group_list'
        response = self.client.get(url)
        self.assertEqual(200, response.status_code)
        url = reverse('lecturer:api-exercise-store') + '?action=group&gid=1'
        response = self.client.get(url)
        self.assertEqual(200, response.status_code)
|
from settings.common import Common
class Dev(Common):
    """Development settings: local SQLite database layered on Common."""

    DATABASES = {
        'default': {
            'ENGINE': 'django.db.backends.sqlite3',  # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
            'NAME': 'ffrpg.sql',  # Or path to database file if using sqlite3.
            # The following settings are not used with sqlite3:
            'USER': '',
            'PASSWORD': '',
            'HOST': '',  # Empty for localhost through domain sockets or '127.0.0.1' for localhost through TCP.
            'PORT': '',  # Set to empty string for default.
        }
    }
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.