code
stringlengths 1
199k
|
|---|
from __future__ import division
import abc
import numpy as n
import scipy.linalg as linalg
import scipy.optimize as opt
import scipy.spatial.distance as dist
class Feature(object):
    '''
    Abstract class that represents a feature to be used
    with :py:class:`pyransac.ransac.RansacFeature`
    '''
    # Python 2 style ABC declaration: subclasses must implement every
    # abstract method/property below before they can be instantiated.
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def __init__(self):
        pass

    @abc.abstractproperty
    def min_points(self):
        '''int: Minimum number of points needed to define the feature.'''
        pass

    @abc.abstractmethod
    def points_distance(self,points):
        '''
        This function implements a method to compute the distance
        of points from the feature.

        Args:
            points (numpy.ndarray): a numpy array of points the distance must be
                computed of.

        Returns:
            distances (numpy.ndarray): the computed distances of the points from the feature.
        '''
        pass

    @abc.abstractmethod
    def print_feature(self,num_points):
        '''
        This method returns an array of x,y coordinates for
        points that are in the feature.

        Args:
            num_points (numpy.ndarray): the number of points to be returned

        Returns:
            coords (numpy.ndarray): a num_points x 2 numpy array that contains
                the points coordinates
        '''
class Circle(Feature):
    '''
    Feature class for a Circle :math:`(x-x_c)^2 + (y-y_c)^2 - r = 0`
    '''

    # Minimum number of points needed to define the circle (3).
    min_points = 3

    def __init__(self, points):
        self.radius, self.xc, self.yc = self.__gen(points)

    def __gen(self, points):
        '''
        Compute the radius and the center coordinates of a
        circumference given three (or more) points.

        Args:
            points (numpy.ndarray): a (N,2) numpy array with N >= 3,
                each row a 2D point.

        Returns:
            (tuple): a 3-element tuple (radius, xc, yc).

        Raises:
            RuntimeError: if the points do not determine a circle
                (e.g. collinear points).
        '''
        # Rewrite the circle equation as a linear system in (D,E,F):
        #   D*xi + E*yi + F = -(xi**2 + yi**2)
        # where (xi, yi) are the coordinates of the i-th point.
        A = n.array([(x, y, 1) for x, y in points])
        rhs = n.array([-(x**2 + y**2) for x, y in points])
        try:
            sol, residues, rank, sv = linalg.lstsq(A, rhs)
        except linalg.LinAlgError:
            raise RuntimeError('Circle calculation not successful. Please '
                               'check the input data, probable collinear points')
        # lstsq does NOT raise on a rank-deficient system: collinear points
        # previously slipped through and produced a NaN radius silently.
        if rank < 3:
            raise RuntimeError('Circle calculation not successful. Please '
                               'check the input data, probable collinear points')
        D, E, F = sol
        xc = -D / 2
        yc = -E / 2
        r = n.sqrt(xc**2 + yc**2 - F)
        return (r, xc, yc)

    def points_distance(self, points):
        r'''
        Compute the distance of the points from the feature

        :math:`d = \left| \sqrt{(x_i - x_c)^2 + (y_i-y_c)^2} - r \right|`

        Args:
            points (numpy.ndarray): a (N,2) numpy array, each row a 2D point.

        Returns:
            d (numpy.ndarray): the computed distances of the points from the feature.
        '''
        center = n.array([self.xc, self.yc]).reshape((1, 2))
        return n.abs(dist.cdist(points, center) - self.radius)

    def print_feature(self, num_points):
        '''
        This method returns an array of x,y coordinates for
        points that are on the circle.

        Args:
            num_points (int): the number of points to be returned

        Returns:
            coords (numpy.ndarray): a (2, num_points) array; row 0 holds the
                x coordinates and row 1 the y coordinates.
        '''
        theta = n.linspace(0, 2 * n.pi, num_points)
        x = self.xc + self.radius * n.cos(theta)
        y = self.yc + self.radius * n.sin(theta)
        return n.vstack((x, y))
class Exponential(Feature):
    '''
    Feature Class for an exponential curve :math:`y=ax^{k} + b`
    '''

    # Minimum number of points needed to define the curve (3).
    min_points = 3

    def __init__(self, points):
        self.a, self.k, self.b = self.__gen(points)

    def __gen(self, points):
        '''
        Compute the three parameters that univocally determine the
        exponential curve.

        Args:
            points (numpy.ndarray): a (3,2) numpy array, each row is a 2D point.

        Returns:
            exp (numpy.ndarray): a (3,) numpy array containing the parameters
                [a, k, b].
        '''
        def exponential(x, points):
            '''Residuals of y = |a|*x**k + b at the three sample points,
            for use with :py:func:`scipy.optimize.root`.
            '''
            aa, nn, bb = x[0], x[1], x[2]
            f = n.zeros((3,))
            for i in range(3):
                f[i] = n.abs(aa) * n.power(points[i, 0], nn) + bb - points[i, 1]
            return f

        exp = opt.root(exponential, [1, 1, 1], points, method='lm')['x']
        # The residual function fits y = |a|*x**k + b, so store |a|:
        # previously a negative solver result for a silently flipped the
        # curve evaluated by points_distance/print_feature.
        exp[0] = n.abs(exp[0])
        return exp

    def points_distance(self, points):
        r'''
        Compute the distance of the points from the feature, i.e. the
        vertical distance :math:`d_i = \left| y_i - (a x_i^k + b) \right|`.

        Args:
            points (numpy.ndarray): a (N,2) numpy array, each row a 2D point.

        Returns:
            d (numpy.ndarray): the computed distances of the points from the feature.
        '''
        x = points[:, 0]
        # Direct vertical distance; equivalent to the former
        # diag(cdist(points, curve_points)) but O(n) instead of O(n^2).
        return n.abs(points[:, 1] - (self.a * n.power(x, self.k) + self.b))

    def print_feature(self, num_points, a, b):
        '''
        This method returns an array of x,y coordinates for
        points that are in the feature in the interval [a,b].

        Args:
            num_points (int): the number of points to be returned
            a (float): left end of the interval
            b (float): right end of the interval

        Returns:
            coords (numpy.ndarray): a (2, num_points) array; row 0 holds the
                x coordinates and row 1 the y coordinates.
        '''
        x = n.linspace(a, b, num_points)
        y = self.a * x**self.k + self.b
        return n.vstack((x, y))
|
"""Klamp't visualization routines. See Python/demos/vistemplate.py for an
example of how to run this module.
The visualization module lets you draw most Klamp't objects in a 3D world
using a simple interface. It also lets you customize the GUI using Qt
widgets, OpenGL drawing, and keyboard/mouse intercept routines.
Main features include:
- Simple interface to modify the visualization
- Simple interface to animate and render trajectories
- Simple interface to edit certain Klamp't objects (configurations, points,
transforms)
- Simple interface to drawing text and text labels, and drawing plots
- Multi-window, multi-viewport support
- Unified interface to PyQt and GLUT (with loss of resource editing functionality
under GLUT)
- Automatic camera setup
The resource editing functionality in the klampt.io.resource module (based on
klampt.vis.editors) use this module as well.
Due to weird OpenGL and Qt behavior in multi-threaded programs, you should
only run visualizations using the methods in this module.
There are two primary ways of setting up a visualization:
- The first is by adding items to the visualization world and customizing them
using the vis.X routines that mirror the methods in VisualizationPlugin (like
add, setColor, animate, etc). See Python/demos/vistemplate.py for more information.
- The second is by creating a subclass of GLPluginInterface and doing
all the necessary drawing / interaction yourself inside its hooks. In the
latter case, you will call vis.setPlugin(plugin) to override the default
visualization behavior before creating your window. See Python/demos/visplugin.py
for more information.
A third way of setting up a visualization is a hybrid of the two, where you can
add functionality on top of the default visualization world. You can either use
vis.pushPlugin(plugin) in which case your plugin adds additional functionality,
or you can subclass the vis.VisualizationPlugin class, and selectively augment /
override the default functionality.
Instructions:
- To add things to the default visualization:
Call the VisualizationPlugin aliases (add, animate, setColor, etc)
- To show the visualization and quit when the user closes the window:
vis.run()
- To show the visualization and return when the user closes the window:
vis.dialog()
... do stuff afterwards ...
vis.kill()
- To show the visualization and be able to run a script alongside it
until the user closes the window:
vis.show()
while vis.shown():
vis.lock()
... do stuff ...
[to exit the loop call show(False)]
vis.unlock()
time.sleep(dt)
... do stuff afterwards ...
vis.kill()
- To run a window with a custom plugin (GLPluginInterface) and terminate on
closure:
vis.run(plugin)
- To show a dialog or parallel window
vis.setPlugin(plugin)
... then call
vis.dialog()
... or
vis.show()
... do stuff afterwards ...
vis.kill()
- To add a GLPluginInterface that just customizes a few things on top of
the default visualization:
vis.pushPlugin(plugin)
vis.dialog()
vis.popPlugin()
- To run plugins side-by-side in the same window:
vis.setPlugin(plugin1)
vis.addPlugin(plugin2) #this creates a new split-screen
vis.dialog()
... or
vis.show()
... do stuff afterwards ...
vis.kill()
- To run a custom dialog in a QtWindow
vis.setPlugin([desired plugin or None for visualization])
vis.setParent(qt_window)
vis.dialog()
... or
vis.show()
... do stuff afterwards ...
vis.kill()
- To launch a second window after the first is closed: just call whatever you
want again. Note: if show was previously called with a plugin and you wish to
revert to the default visualization, you should call setPlugin(None) first to
restore the default.
- To create a separate window with a given plugin:
w1 = vis.createWindow() #w1=0
show()
w2 = vis.createWindow() #w2=1
vis.setPlugin(plugin)
vis.dialog()
#to restore commands to the original window
vis.setWindow(w1)
while vis.shown():
...
vis.kill()
Note: when changing the data shown by the window (e.g., modifying the
configurations of robots in a WorldModel) you must call vis.lock() before
accessing the data and then call vis.unlock() afterwards.
The main interface is as follows:
def createWindow(title=None): creates a new visualization window and returns an
integer identifier.
def setWindow(id): sets the active window for all subsequent calls. ID 0 is
the default visualization window.
def getWindow(): gets the active window ID.
def setWindowTitle(title): sets the title of the visualization window.
def getWindowTitle(): returns the title of the visualization window
def setPlugin(plugin=None): sets the current plugin (a GLPluginInterface instance).
This plugin will now capture input from the visualization and can override
any of the default behavior of the visualizer. Set plugin=None if you want to return
to the default visualization.
def addPlugin(plugin): adds a second OpenGL viewport governed by the given plugin (a
GLPluginInterface instance).
def run([plugin]): pops up a dialog and then kills the program afterwards.
def kill(): kills all previously launched visualizations. Afterwards, you may not
be able to start new windows. Call this to cleanly quit.
def dialog(): pops up a dialog box (does not return to calling
thread until closed).
def show(hidden=False): shows/hides a visualization window run in parallel with the calling script.
def spin(duration): shows the visualization window for the desired amount
of time before returning, or until the user closes the window.
def shown(): returns true if the window is shown.
def lock(): locks the visualization world for editing. The visualization will
be paused until unlock() is called.
def unlock(): unlocks the visualization world. Must only be called once
after every lock().
def customUI(make_func): launches a user-defined UI window by calling make_func(gl_backend)
in the visualization thread. This can be used to build custom editors and windows that
are compatible with other visualization functionality. Here gl_backend is an instance of
_GLBackend instantiated for the current plugin.
def getViewport(): Returns the currently active viewport.
The following VisualizationPlugin methods are also added to the klampt.vis namespace
and operate on the default plugin. If you are calling these methods from an external
loop (as opposed to inside a plugin) be sure to lock/unlock the visualization before/after
calling these methods.
def add(name,item,keepAppearance=False): adds an item to the visualization.
name is a unique identifier. If an item with the same name already exists,
it will no longer be shown. If keepAppearance=True, then the prior item's
appearance will be kept, if a prior item exists.
def clear(): clears the visualization world.
def listItems(): prints out all names of visualization objects
def listItems(name): prints out all names of visualization objects under the given name
def dirty(item_name='all'): marks the given item as dirty and recreates the
OpenGL display lists. You may need to call this if you modify an item's geometry,
for example.
def remove(name): removes an item from the visualization.
def setItemConfig(name,vector): sets the configuration of a named item.
def getItemConfig(name): returns the configuration of a named item.
def hide(name,hidden=True): hides/unhides an item. The item is not removed,
it just becomes invisible.
def edit(name,doedit=True): turns on/off visual editing of some item. Only points,
transforms, coordinate.Point's, coordinate.Transform's, coordinate.Frame's,
robots, and objects are currently accepted.
def hideLabel(name,hidden=True): hides/unhides an item's text label.
def setAppearance(name,appearance): changes the Appearance of an item.
def revertAppearance(name): restores the Appearance of an item
def setAttribute(name,attribute,value): sets an attribute of the appearance
of an item. Typical attributes are 'color', 'size', 'length', 'width'...
TODO: document all accepted attributes.
def setColor(name,r,g,b,a=1.0): changes the color of an item.
def setDrawFunc(name,func): sets a custom OpenGL drawing function for an item.
func is a one-argument function that takes the item data as input. Set
func to None to revert to default drawing.
def animate(name,animation,speed=1.0,endBehavior='loop'): Sends an animation to the
object. May be a Trajectory or a list of configurations. Works with points,
so3 elements, se3 elements, rigid objects, or robots.
- speed: a modulator on the animation speed. If the animation is a list of
milestones, it is by default run at 1 milestone per second.
- endBehavior: either 'loop' (animation repeats forever) or 'halt' (plays once).
def pauseAnimation(paused=True): Turns on/off animation.
def stepAnimation(amount): Moves forward the animation time by the given amount
in seconds
def animationTime(newtime=None): Gets/sets the current animation time
If newtime == None (default), this gets the animation time.
If newtime != None, this sets a new animation time.
def addText(name,text,position=None): adds text. You need to give an
identifier to all pieces of text, which will be used to access the text as any other
vis object. If position is None, this is added as an on-screen display. If position
is of length 2, it is the (x,y) position of the upper left corner of the text on the
screen. Negative units anchor the text to the right or bottom of the window.
If position is of length 3, the text is drawn in the world coordinates. You can
then set the color, 'size' attribute, and 'position' attribute of the text using the
identifier given in 'name'.
def clearText(): clears all previously added text.
def addPlot(name): creates a new empty plot.
def addPlotItem(name,itemname): adds a visualization item to a plot.
def logPlot(name,itemname,value): logs a custom visualization item to a plot
def logPlotEvent(name,eventname,color=None): logs an event on the plot.
def hidePlotItem(name,itemname,hidden=True): hides an item in the plot. To hide a
particular channel of a given item pass a pair (itemname,channelindex). For example,
to hide configurations 0-5 of 'robot', call hidePlotItem('plot',('robot',0)), ...,
hidePlotItem('plot',('robot',5)).
def setPlotDuration(name,time): sets the plot duration.
def setPlotRange(name,vmin,vmax): sets the y range of a plot.
def setPlotPosition(name,x,y): sets the upper left position of the plot on the screen.
def setPlotSize(name,w,h): sets the width and height of the plot.
def savePlot(name,fn): saves a plot to a CSV (extension .csv) or Trajectory (extension
.traj) file.
def autoFitCamera(scale=1.0): Automatically fits the camera to all objects in the
visualization. A scale > 1 magnifies the camera zoom.
Utility function:
def autoFitViewport(viewport,objects): Automatically fits the viewport's camera to
see all the given objects.
NAMING CONVENTION:
The world, if one exists, should be given the name 'world'. Configurations and paths are drawn
with reference to the first robot in the world.
All items that refer to a name (except add) can either be given a top level item name
(a string) or a sub-item (a sequence of strings, given a path from the root to the leaf).
For example, if you've added a RobotWorld under the name 'world' containing a robot called
'myRobot', then setColor(('world','myRobot'),0,1,0) will turn the robot green. If 'link5'
is the robot's 5th link, then setColor(('world','myRobot','link5'),0,0,1) will turn the 5th
link blue.
"""
from OpenGL.GL import *
from threading import Thread,RLock
from ..robotsim import *
from ..math import vectorops,so3,se3
import gldraw
from glinit import *
from glinit import _GLBackend,_PyQtAvailable,_GLUTAvailable
from glinterface import GLPluginInterface
from glprogram import GLPluginProgram
import glcommon
import time
import signal
import weakref
from ..model import types
from ..model import config
from ..model import coordinates
from ..model.subrobot import SubRobotModel
from ..model.trajectory import *
from ..model.contact import ContactPoint,Hold
class WindowInfo:
    """Bookkeeping record for a single visualization window.

    Mode can be hidden, shown, or dialog."""
    def __init__(self, name, frontend, vis, glwindow=None):
        # Identity and back-end objects for this window.
        self.name, self.frontend, self.vis = name, frontend, vis
        self.glwindow = glwindow
        # Display state: windows start out hidden.
        self.mode = 'hidden'
        self.guidata = None
        self.custom_ui = None
        # Flags polled by the visualization thread.
        self.doRefresh = False
        self.doReload = False
        # Worlds referenced by / currently drawn in this window
        # (kept as two distinct lists).
        self.worlds = []
        self.active_worlds = []
# Guards all of the module-level visualization state below; re-entrant so
# nested acquisitions from the same thread are safe.
_globalLock = RLock()
# The default VisualizationPlugin; None means visualization is disabled.
_vis = None
# The GL program hosting the current plugin (stack).
_frontend = GLPluginProgram()
# Title used for the current/next window.
_window_title = "Klamp't visualizer"
# Worlds referenced by the current window (entries appear to be weak
# references -- they are called as w() in setWindow; confirm).
_current_worlds = []
# All WindowInfo records, indexed by window id.
_windows = []
# Index into _windows of the active window, or None before any
# createWindow/setWindow call.
_current_window = None
def createWindow(name):
    """Creates a new window (and sets it active).

    Args:
        name (str): the title of the new window.

    Returns:
        int: the identifier of the new window, usable with setWindow().
    """
    global _globalLock,_frontend,_vis,_window_title,_current_worlds,_windows,_current_window
    _globalLock.acquire()
    if len(_windows) == 0:
        #save the defaults in window 0
        _windows.append(WindowInfo(_window_title,_frontend,_vis))
        _windows[-1].worlds = _current_worlds
        _windows[-1].active_worlds = _current_worlds[:]
    #make a new window: fresh frontend, plugin, and world list
    _window_title = name
    _frontend = GLPluginProgram()
    _vis = VisualizationPlugin()
    _frontend.setPlugin(_vis)
    _windows.append(WindowInfo(_window_title,_frontend,_vis))
    _current_worlds = []
    id = len(_windows)-1
    _current_window = id
    _globalLock.release()
    return id
def setWindow(id):
    """Sets the currently active window by integer identifier.

    ID 0 is the default visualization window. Raises AssertionError if id
    is out of range. No-op if id is already active.
    """
    global _globalLock,_frontend,_vis,_window_title,_windows,_current_window,_current_worlds
    if id == _current_window:
        return
    _globalLock.acquire()
    if len(_windows) == 0:
        #save the defaults in window 0
        _windows.append(WindowInfo(_window_title,_frontend,_vis))
        _windows[-1].worlds = _current_worlds
        _windows[-1].active_worlds = _current_worlds[:]
    assert id >= 0 and id < len(_windows),"Invalid window id"
    # Swap in the target window's saved state.
    _window_title,_frontend,_vis,_current_worlds = _windows[id].name,_windows[id].frontend,_windows[id].vis,_windows[id].worlds
    #print "vis.setWindow(",id,") the window has status",_windows[id].mode
    if not _PyQtAvailable:
        #PyQt interface allows sharing display lists but GLUT does not.
        #refresh all worlds' display lists that were once active.
        # NOTE(review): entries of _current_worlds look like weak references
        # (called as w()); also, if _current_window is still None here and
        # _current_worlds is nonempty, _windows[None] would raise TypeError
        # -- confirm callers always create/select a window first.
        for w in _current_worlds:
            if w in _windows[_current_window].active_worlds:
                print "klampt.vis.setWindow(): world",w().index,"becoming active in the new window",id
                _refreshDisplayLists(w())
                _windows[_current_window].active_worlds.remove(w)
    _windows[id].active_worlds = _current_worlds[:]
    _current_window = id
    _globalLock.release()
def getWindow():
    """Retrieves the ID of the currently active window.

    Returns:
        int: the active window ID, or 0 (the default window) if no window
        has been explicitly made active.
    """
    global _current_window
    # Before any createWindow()/setWindow() call the active id is None;
    # report the default window 0 in that case. (The previous docstring
    # incorrectly claimed -1 was returned.)
    if _current_window is None:
        return 0
    return _current_window
def setPlugin(plugin):
    """Lets the user capture input via a glinterface.GLPluginInterface class.
    Set plugin to None to disable plugins and return to the standard visualization.

    Args:
        plugin (GLPluginInterface or None): the plugin to install.

    Raises:
        RuntimeError: if plugin is None but the visualization is disabled.
    """
    global _globalLock,_frontend,_windows,_current_window
    _globalLock.acquire()
    # addPlugin may have replaced _frontend with a multi-viewport program;
    # restore a plain GLPluginProgram bound to the current GL window.
    if not isinstance(_frontend,GLPluginProgram):
        _frontend = GLPluginProgram()
        if _current_window != None:
            if _windows[_current_window].glwindow != None:
                _frontend.window = _windows[_current_window].glwindow
    if plugin == None:
        global _vis
        if _vis==None:
            raise RuntimeError("Visualization disabled")
        _frontend.setPlugin(_vis)
    else:
        _frontend.setPlugin(plugin)
        # If the plugin carries a world, register it with this window.
        if hasattr(plugin,'world'):
            _checkWindowCurrent(plugin.world)
    _onFrontendChange()
    _globalLock.release()
def pushPlugin(plugin):
    """Adds a new glinterface.GLPluginInterface plugin on top of the old one.

    Raises:
        RuntimeError: if the plugin stack is empty and the visualization
            is disabled.
    """
    global _globalLock,_frontend
    _globalLock.acquire()
    assert isinstance(_frontend,GLPluginProgram),"Can't push a plugin after addPlugin"
    # An empty stack means the default visualization plugin must be
    # installed first so it remains underneath the pushed plugin.
    if len(_frontend.plugins) == 0:
        global _vis
        if _vis==None:
            raise RuntimeError("Visualization disabled")
        _frontend.setPlugin(_vis)
    _frontend.pushPlugin(plugin)
    _onFrontendChange()
    _globalLock.release()
def popPlugin():
    """Reverses a prior pushPlugin() call."""
    global _frontend
    _globalLock.acquire()
    _frontend.popPlugin()
    _onFrontendChange()
    _globalLock.release()
def addPlugin(plugin):
    """Adds a second OpenGL viewport in the same window, governed by the given plugin (a
    glinterface.GLPluginInterface instance)."""
    global _frontend
    _globalLock.acquire()
    #create a multi-view widget
    if isinstance(_frontend,glcommon.GLMultiViewportProgram):
        # Already split-screen: just add another view.
        _frontend.addView(plugin)
    else:
        # First split: ensure the current frontend has a plugin, then wrap
        # it and the new plugin in a multi-viewport program.
        if len(_frontend.plugins) == 0:
            setPlugin(None)
        multiProgram = glcommon.GLMultiViewportProgram()
        multiProgram.window = None
        if _current_window != None:
            if _windows[_current_window].glwindow != None:
                multiProgram.window = _windows[_current_window].glwindow
        multiProgram.addView(_frontend)
        multiProgram.addView(plugin)
        multiProgram.name = _window_title
        _frontend = multiProgram
    _onFrontendChange()
    _globalLock.release()
def run(plugin=None):
    """A blocking call to start a single window and then kill the visualization
    when closed. If plugin == None, the default visualization is used.
    Otherwise, plugin is a glinterface.GLPluginInterface object, and it is used."""
    setPlugin(plugin)
    show()
    # Poll until the user closes the window.
    while shown():
        time.sleep(0.1)
    # Restore the default plugin before tearing everything down.
    setPlugin(None)
    kill()
def dialog():
    """A blocking call to start a single dialog window with the current plugin. It is
    closed by pressing OK or closing the window."""
    _dialog()
def setWindowTitle(title):
    """Sets the title of the current visualization window."""
    global _window_title
    _window_title = title
    _onFrontendChange()
def getWindowTitle():
    """Returns the title of the current visualization window."""
    global _window_title
    return _window_title
def kill():
    """This should be called at the end of the calling program to cleanly terminate the
    visualization thread"""
    global _vis,_globalLock
    if _vis==None:
        print "vis.kill() Visualization disabled"
        return
    _kill()
def show(display=True):
    """Shows or hides the current window.

    Args:
        display (bool): True shows the window, False hides it.
    """
    _globalLock.acquire()
    if display:
        _show()
    else:
        _hide()
    _globalLock.release()
def spin(duration):
    """Spin-shows a window for a certain duration or until the window is closed."""
    show()
    t = 0
    while t < duration:
        if not shown(): break
        time.sleep(min(0.04,duration-t))
        # t advances by the nominal step, not wall-clock time.
        t += 0.04
    show(False)
    return
def lock():
    """Begins a locked section. Needs to be called any time you modify a visualization item outside
    of the visualization thread. unlock() must be called to let the visualization thread proceed."""
    global _globalLock
    _globalLock.acquire()
def unlock():
    """Ends a locked section acquired by lock()."""
    global _globalLock,_windows
    # Request a redraw of every live window so edits made while locked
    # become visible.
    for w in _windows:
        if w.glwindow:
            w.doRefresh = True
    _globalLock.release()
def shown():
    """Returns true if a visualization window is currently shown."""
    global _globalLock,_thread_running,_current_window
    _globalLock.acquire()
    # NOTE(review): 'and' binds tighter than 'or', so this parses as
    # (_thread_running and _current_window != None and mode in [...]) or
    # (guidata is not None); the 'or' branch indexes _windows[_current_window]
    # even when _current_window is None -- confirm the intended grouping.
    res = (_thread_running and _current_window != None and _windows[_current_window].mode in ['shown','dialog'] or _windows[_current_window].guidata is not None)
    _globalLock.release()
    return res
def customUI(func):
    """Tells the next created window/dialog to use a custom UI function. func is a 1-argument function that
    takes a QtWindow or GLUTWindow as its argument."""
    global _globalLock
    _globalLock.acquire()
    _set_custom_ui(func)
    _globalLock.release()
def getViewport():
    """Returns the GLViewport of the current window (see klampt.vis.glprogram.GLViewport)"""
    return _frontend.get_view()
def setViewport(viewport):
    """Sets the current window to use a given GLViewport (see klampt.vis.glprogram.GLViewport)"""
    _frontend.set_view(viewport)
def clear():
    """Clears the visualization world. No-op if visualization is disabled."""
    global _vis
    if _vis==None:
        return
    _vis.clear()
def add(name,item,keepAppearance=False):
    """Adds an item to the visualization. name is a unique identifier. If an item with
    the same name already exists, it will no longer be shown. If keepAppearance=True, then
    the prior item's appearance will be kept, if a prior item exists."""
    global _vis
    if _vis==None:
        print "Visualization disabled"
        return
    _globalLock.acquire()
    # Register the item's world (if any) with the current window so display
    # lists stay consistent.
    _checkWindowCurrent(item)
    _globalLock.release()
    _vis.add(name,item,keepAppearance)
def listItems(name=None,indent=0):
    """Prints out all names of visualization objects (under the given name,
    if one is provided)."""
    global _vis
    if _vis==None:
        print "Visualization disabled"
        return
    _vis.listItems(name,indent)
def dirty(item_name='all'):
    """Marks the given item as dirty and recreates the OpenGL display lists. You may need
    to call this if you modify an item's geometry, for example. If things start disappearing
    from your world when you create a new window, you may need to call this too."""
    global _vis
    if _vis==None:
        print "Visualization disabled"
        return
    _vis.dirty(item_name)
def animate(name,animation,speed=1.0,endBehavior='loop'):
    """Sends an animation to the named object.
    Works with points, so3 elements, se3 elements, rigid objects, or robots, and may work
    with other objects as well.

    Parameters:
    - animation: may be a Trajectory or a list of configurations.
    - speed: a modulator on the animation speed. If the animation is a list of
      milestones, it is by default run at 1 milestone per second.
    - endBehavior: either 'loop' (animation repeats forever) or 'halt' (plays once).
    """
    global _vis
    if _vis==None:
        print "Visualization disabled"
        return
    _vis.animate(name,animation,speed,endBehavior)
def pauseAnimation(paused=True):
    """Turns on/off the animation of all items."""
    global _vis
    if _vis==None:
        print "Visualization disabled"
        return
    _vis.pauseAnimation(paused)
def stepAnimation(amount):
    """Moves the animation time forward by the given amount, in seconds."""
    global _vis
    if _vis==None:
        print "Visualization disabled"
        return
    _vis.stepAnimation(amount)
def animationTime(newtime=None):
    """Gets/sets the current animation time

    If newtime == None (default), this gets the animation time.

    If newtime != None, this sets a new animation time.
    """
    global _vis
    if _vis==None:
        print "Visualization disabled"
        # Returns 0 rather than None so callers doing arithmetic don't crash.
        return 0
    return _vis.animationTime(newtime)
def remove(name):
    """Removes an item from the visualization. No-op if visualization is disabled."""
    global _vis
    if _vis==None:
        return
    return _vis.remove(name)
def getItemConfig(name):
    """Returns the configuration of a named item, or None if visualization is disabled."""
    global _vis
    if _vis==None:
        return None
    return _vis.getItemConfig(name)
def setItemConfig(name,value):
    """Sets the configuration of a named item."""
    global _vis
    if _vis==None:
        return
    return _vis.setItemConfig(name,value)
def hideLabel(name,hidden=True):
    """Hides/unhides an item's text label."""
    global _vis
    if _vis==None:
        return
    return _vis.hideLabel(name,hidden)
def hide(name,hidden=True):
    """Hides/unhides an item. The item is not removed, it just becomes invisible."""
    global _vis
    if _vis==None:
        return
    _vis.hide(name,hidden)
def edit(name,doedit=True):
    """Turns on/off visual editing of some item. Only points, transforms,
    coordinate.Point's, coordinate.Transform's, coordinate.Frame's, robots,
    and objects are currently accepted."""
    global _vis
    if _vis==None:
        return
    _vis.edit(name,doedit)
def setAppearance(name,appearance):
    """Changes the Appearance of an item."""
    global _vis
    if _vis==None:
        return
    _vis.setAppearance(name,appearance)
def setAttribute(name,attr,value):
    """Sets an attribute of an item's appearance, e.g. 'color', 'size',
    'length', 'width'."""
    global _vis
    if _vis==None:
        return
    _vis.setAttribute(name,attr,value)
def revertAppearance(name):
    """Restores the Appearance of an item to its default."""
    global _vis
    if _vis==None:
        return
    _vis.revertAppearance(name)
def setColor(name,r,g,b,a=1.0):
    """Changes the color of an item (RGBA in [0,1])."""
    global _vis
    if _vis==None:
        return
    _vis.setColor(name,r,g,b,a)
def setDrawFunc(name,func):
    """Sets a custom OpenGL drawing function for an item. func is a one-argument
    function taking the item data; pass None to revert to default drawing."""
    global _vis
    if _vis==None:
        return
    _vis.setDrawFunc(name,func)
def _getOffsets(obj):
    """Recursively collects representative 3D offset points for an object.

    Args:
        obj: a WorldModel, RobotModel, RigidObjectModel, Geometry3D, or
            VisAppearance.

    Returns:
        list: a (possibly empty) list of 3-vectors. Every branch returns a
        *list* so callers can concatenate results with +=.
    """
    if isinstance(obj,WorldModel):
        res = []
        for i in range(obj.numRobots()):
            res += _getOffsets(obj.robot(i))
        for i in range(obj.numRigidObjects()):
            res += _getOffsets(obj.rigidObject(i))
        return res
    elif isinstance(obj,RobotModel):
        # Offset of each link's origin relative to the zero configuration.
        q = obj.getConfig()
        obj.setConfig([0.0]*len(q))
        worig = [obj.link(i).getTransform()[1] for i in range(obj.numLinks())]
        obj.setConfig(q)
        wnew = [obj.link(i).getTransform()[1] for i in range(obj.numLinks())]
        return [vectorops.sub(b,a) for a,b in zip(worig,wnew)]
    elif isinstance(obj,RigidObjectModel):
        return [obj.getTransform()[1]]
    elif isinstance(obj,Geometry3D):
        # Bug fix: wrap in a list -- previously the bare translation vector
        # was returned, so callers doing res += ... accumulated three floats
        # instead of one 3-vector.
        return [obj.getCurrentTransform()[1]]
    elif isinstance(obj,VisAppearance):
        res = _getOffsets(obj.item)
        if len(res) != 0: return res
        if len(obj.subAppearances) == 0:
            # Leaf appearance with no underlying offsets: use the bbox center.
            bb = obj.getBounds()
            if bb != None and not aabb_empty(bb):
                return [vectorops.mul(vectorops.add(bb[0],bb[1]),0.5)]
        else:
            res = []
            for a in obj.subAppearances.itervalues():
                res += _getOffsets(a)
            return res
    return []
def _getBounds(obj):
    """Recursively collects bounding-box corner points for an object.

    Args:
        obj: a WorldModel, RobotModel, RigidObjectModel, Geometry3D, or
            VisAppearance.

    Returns:
        list: a (possibly empty) list of 3D points suitable for aabb_create.
    """
    if isinstance(obj,WorldModel):
        res = []
        for i in range(obj.numRobots()):
            # Bug fix: WorldModel's accessor is robot(i) (as used in
            # _getOffsets); robots(i) raised AttributeError.
            res += _getBounds(obj.robot(i))
        for i in range(obj.numRigidObjects()):
            res += _getBounds(obj.rigidObject(i))
        return res
    elif isinstance(obj,RobotModel):
        # Concatenate every link geometry's bounding box corners.
        return sum([obj.link(i).geometry().getBB() for i in range(obj.numLinks())],[])
    elif isinstance(obj,RigidObjectModel):
        return obj.geometry().getAABB()
    elif isinstance(obj,Geometry3D):
        return obj.getAABB()
    elif isinstance(obj,VisAppearance):
        if len(obj.subAppearances) == 0:
            # Terrains are deliberately excluded from camera fitting.
            if isinstance(obj.item,TerrainModel):
                return []
            bb = obj.getBounds()
            if bb != None and not aabb_empty(bb):
                return list(bb)
        else:
            res = []
            for a in obj.subAppearances.itervalues():
                res += _getBounds(a)
            return res
    return []
def _fitPlane(pts):
import numpy as np
if len(pts) < 3:
raise ValueError("Point set is degenerate")
centroid = vectorops.div(vectorops.add(*pts),len(pts))
A = np.array([vectorops.sub(pt,centroid) for pt in pts])
U,S,V = np.linalg.svd(A,full_matrices=False)
imin = 0
smin = S[0]
zeros = []
for i in xrange(len(S)):
if abs(S[i]) < 1e-6:
zeros.append(i)
if abs(S[i]) < smin:
smin = S[i]
imin = i
if len(zeros) > 1:
raise ValueError("Point set is degenerate")
assert V.shape == (3,3)
#normal is the corresponding row of U
normal = V[imin,:]
return centroid,normal.tolist()
def autoFitViewport(viewport,objects):
    """Automatically fits the viewport's camera to see all the given objects.

    Args:
        viewport: a GLViewport whose camera and clipping planes are modified
            in place.
        objects: a list of visualization objects (worlds, robots, geometries,
            VisAppearances).
    """
    # NOTE(review): this function uses math.tan/radians/atan2, but 'math' is
    # not among the imports visible in this file section -- confirm it is
    # imported elsewhere in the module.
    ofs = sum([_getOffsets(o) for o in objects],[])
    pts = sum([_getBounds(o) for o in objects],[])
    #reset to a default view first
    viewport.camera.rot = [0.,0.,0.]
    viewport.camera.tgt = [0.,0.,0.]
    viewport.camera.dist = 6.0
    viewport.clippingplanes = (0.2,20)
    if len(ofs) == 0:
        # Nothing to fit; keep the defaults.
        return
    # Center the camera on the overall bounding box.
    bb = aabb_create(*pts)
    center = vectorops.mul(vectorops.add(bb[0],bb[1]),0.5)
    viewport.camera.tgt = center
    radius = max(vectorops.distance(bb[0],center),0.1)
    viewport.camera.dist = 1.2*radius / math.tan(math.radians(viewport.fov*0.5))
    #default: oblique view
    viewport.camera.rot = [0,math.radians(30),math.radians(45)]
    #fit a plane to these points to pick a better viewing direction
    try:
        centroid,normal = _fitPlane(ofs)
    except Exception as e:
        try:
            centroid,normal = _fitPlane(pts)
        except Exception as e:
            print "Exception occurred during fitting to points"
            print ofs
            print pts
            raise
        return
    # View from above the plane, not below it.
    if normal[2] > 0:
        normal = vectorops.mul(normal,-1)
    z,x,y = so3.matrix(so3.inv(so3.canonical(normal)))
    # Scene extent along the view-aligned axes (y scaled by aspect ratio).
    radius = max([abs(vectorops.dot(x,vectorops.sub(center,pt))) for pt in pts] + [abs(vectorops.dot(y,vectorops.sub(center,pt)))*viewport.w/viewport.h for pt in pts])
    zmin = min([vectorops.dot(z,vectorops.sub(center,pt)) for pt in pts])
    zmax = max([vectorops.dot(z,vectorops.sub(center,pt)) for pt in pts])
    #orient camera to point along normal direction
    viewport.camera.tgt = center
    viewport.camera.dist = 1.2*radius / math.tan(math.radians(viewport.fov*0.5))
    # Widen the clipping planes if the scene would be clipped.
    near,far = viewport.clippingplanes
    if viewport.camera.dist + zmin < near:
        near = max((viewport.camera.dist + zmin)*0.5, radius*0.1)
    if viewport.camera.dist + zmax > far:
        far = max((viewport.camera.dist + zmax)*1.5, radius*3)
    viewport.clippingplanes = (near,far)
    # Convert the viewing direction into camera roll/pitch/yaw.
    roll = 0
    yaw = math.atan2(normal[0],normal[1])
    pitch = math.atan2(-normal[2],vectorops.norm(normal[0:2]))
    viewport.camera.rot = [roll,pitch,yaw]
def addText(name,text,pos=None):
    """Adds text to the visualizer. You must give an identifier to all pieces of
    text, which will be used to access the text as any other vis object.

    Parameters:
    - name: the text's unique identifier.
    - text: the string to be drawn
    - pos: the position of the string. If pos=None, this is added to the on-screen "console" display.
      If pos has length 2, it is the (x,y) position of the upper left corner of the text on the
      screen. Negative units anchor the text to the right or bottom of the window.
      If pos has length 3, the text is drawn in the world coordinates.

    To customize the text appearance, you can set the color, 'size' attribute, and 'position'
    attribute of the text using the identifier given in 'name'.
    """
    global _vis
    #consistency fix: every other module-level function in this family is a
    #no-op when the visualization has not been initialized (_vis==None);
    #previously this function would raise AttributeError instead
    if _vis==None:
        return
    _vis.add(name,text,True)
    if pos is not None:
        _vis.setAttribute(name,'position',pos)
def clearText():
    """Removes every text object from the visualization (no-op when the
    visualization has not been initialized)."""
    global _vis
    if _vis != None:
        _vis.clearText()
def addPlot(name):
    """Creates and adds a new, empty plot under the identifier `name`
    (shorthand for add(name,VisPlot()))."""
    add(name,VisPlot())
def addPlotItem(name,itemname):
    """Starts tracking the visualization item `itemname` on the plot named
    `name` (no-op when the visualization has not been initialized)."""
    global _vis
    if _vis != None:
        _vis.addPlotItem(name,itemname)
def logPlot(name,itemname,value):
    """Logs a custom visualization item to a plot: records `value` for the
    channel `itemname` on the plot named `name`."""
    global _vis
    if _vis != None:
        _vis.logPlot(name,itemname,value)
def logPlotEvent(name,eventname,color=None):
    """Marks the named event at the current time on the plot named `name`,
    optionally with a given color."""
    global _vis
    if _vis != None:
        _vis.logPlotEvent(name,eventname,color)
def hidePlotItem(name,itemname,hidden=True):
    """Hides (or shows, when hidden=False) the channel `itemname` on the plot
    named `name`."""
    global _vis
    if _vis != None:
        _vis.hidePlotItem(name,itemname,hidden)
def setPlotDuration(name,time):
    """Sets the history window, in seconds, shown by the plot named `name`."""
    setAttribute(name,'duration',time)
def setPlotRange(name,vmin,vmax):
    """Fixes the vertical value range (vmin,vmax) of the plot named `name`."""
    setAttribute(name,'range',(vmin,vmax))
def setPlotPosition(name,x,y):
    """Sets the on-screen (x,y) position of the plot named `name`."""
    setAttribute(name,'position',(x,y))
def setPlotSize(name,w,h):
    """Sets the on-screen width and height of the plot named `name`."""
    setAttribute(name,'size',(w,h))
def savePlot(name,fn):
    """Saves the data of the plot named `name` to the file fn (.csv or .traj)."""
    global _vis
    if _vis != None:
        _vis.savePlot(name,fn)
def autoFitCamera(scale=1):
    """Automatically repositions the camera so the whole scene is visible.

    Parameters:
    - scale: a multiplier on the fitted viewing distance (1 = tight fit).
    """
    global _vis
    if _vis==None:
        return
    print "klampt.vis: auto-fitting camera to scene."
    _vis.autoFitCamera(scale)
def objectToVisType(item,world):
itypes = types.objectToTypes(item,world)
if isinstance(itypes,(list,tuple)):
#ambiguous, still need to figure out what to draw
validtypes = []
for t in itypes:
if t == 'Config':
if world != None and len(item) == world.robot(0).numLinks():
validtypes.append(t)
elif t=='Vector3':
validtypes.append(t)
elif t=='RigidTransform':
validtypes.append(t)
if len(validtypes) > 1:
print "Unable to draw item of ambiguous types",validtypes
return
if len(validtypes) == 0:
print "Unable to draw any of types",itypes
return
return validtypes[0]
return itypes
def aabb_create(*ptlist):
    """Returns an axis-aligned bounding box (bmin,bmax) containing all of the
    given points.

    With no arguments, returns the "empty" bound ([+inf]*3,[-inf]*3), which is
    the identity element for aabb_expand.
    """
    if len(ptlist) == 0:
        return [float('inf')]*3,[float('-inf')]*3
    #take the componentwise min/max across all points (idiomatic replacement
    #for the previous xrange index loop; also Python 3 compatible)
    bmin = [min(coords) for coords in zip(*ptlist)]
    bmax = [max(coords) for coords in zip(*ptlist)]
    return bmin,bmax
def aabb_expand(bb,bb2):
    """Returns the smallest axis-aligned bounding box containing both bb and
    bb2, each given as a (bmin,bmax) pair."""
    lo = [min(u,v) for u,v in zip(bb[0],bb2[0])]
    hi = [max(u,v) for u,v in zip(bb[1],bb2[1])]
    return (lo,hi)
def aabb_empty(bb):
    """Returns True if the bounding box bb=(bmin,bmax) contains no points
    (i.e., some lower bound exceeds its upper bound)."""
    bmin,bmax = bb
    return any(lo > hi for lo,hi in zip(bmin,bmax))
#Default slope-change threshold used by VisPlotItem.updateTrace to decide
#whether a new sample extends the last line segment or starts a new one.
_defaultCompressThreshold = 1e-2
class VisPlotItem:
    """One item tracked by a VisPlot.  Stores one trace (a deque of
    (time,value) samples) per scalar channel of the item.

    The attributes are:
    - name: the item identifier.
    - itemnames: per-channel names (configuration element names, or the names
      given to customUpdate).
    - linkitem: the linked appearance whose configuration is sampled on
      update(), or None for custom (manually logged) channels.
    - traces: a list of deques of (time,value) samples, one per channel.
    - hidden: per-channel visibility flags.
    - traceRanges: per-channel (min,max) of all values seen so far.
    - luminosity: per-channel brightness used to distinguish plot lines.
    - compressThreshold: slope-change threshold for on-line trace compression
      (None disables compression).
    """
    def __init__(self,itemname,linkitem):
        self.name = itemname
        self.itemnames = []
        self.linkitem = linkitem
        self.traces = []
        self.hidden = []
        self.traceRanges = []
        self.luminosity = []
        self.compressThreshold = _defaultCompressThreshold
        if linkitem is not None:
            q = config.getConfig(linkitem.item)
            assert q is not None
            from collections import deque
            self.traces = [deque() for i in range(len(q))]
            self.itemnames = config.getConfigNames(linkitem.item)
    def customUpdate(self,item,t,v):
        """Logs value v at time t on the custom channel named `item`,
        creating the channel on first use."""
        for i,itemname in enumerate(self.itemnames):
            if item == itemname:
                self.updateTrace(i,t,v)
                self.traceRanges[i] = (min(self.traceRanges[i][0],v),max(self.traceRanges[i][1],v))
                return
        #channel not found: append a new one.  (This replaces a confusing
        #for/else construct -- the loop has no break, so the else clause ran
        #exactly when no channel matched.)
        from collections import deque
        self.itemnames.append(item)
        self.traces.append(deque())
        i = len(self.itemnames)-1
        self.updateTrace(i,t,v)
        self.traceRanges[i] = (min(self.traceRanges[i][0],v),max(self.traceRanges[i][1],v))
    def update(self,t):
        """Samples the linked item's current configuration at time t.  No-op
        for custom items (linkitem is None)."""
        if self.linkitem is None:
            return
        q = config.getConfig(self.linkitem.item)
        assert len(self.traces) == len(q)
        for i,v in enumerate(q):
            self.updateTrace(i,t,v)
            self.traceRanges[i] = (min(self.traceRanges[i][0],v),max(self.traceRanges[i][1],v))
    def discard(self,tstart):
        """Discards samples older than tstart in every trace, always keeping
        at least one sample at or before tstart for interpolation."""
        for t in self.traces:
            if len(t)<=1:
                #bug fix: was 'return', which incorrectly skipped the
                #remaining traces as soon as one short trace was seen
                continue
            while len(t) >= 2:
                if t[1][0] < tstart:
                    t.popleft()
                else:
                    break
    def updateTrace(self,i,t,v):
        """Appends sample (t,v) to channel i, growing the per-channel metadata
        lists as needed, and compressing near-collinear runs of samples when
        compressThreshold is set."""
        import random
        assert i < len(self.traces)
        assert i <= len(self.hidden)
        assert i <= len(self.luminosity)
        while i >= len(self.hidden):
            self.hidden.append(False)
        while i >= len(self.traceRanges):
            self.traceRanges.append((v,v))
        if i >= len(self.luminosity):
            #the first few channels get fixed luminosities for repeatability;
            #later channels get random ones
            initialLuminosity = [0.5,0.25,0.75,1.0]
            while i >= len(self.luminosity):
                if len(self.luminosity)<len(initialLuminosity):
                    self.luminosity.append(initialLuminosity[len(self.luminosity)])
                else:
                    self.luminosity.append(random.uniform(0,1))
        trace = self.traces[i]
        if len(trace) > 0 and trace[-1][0] == t:
            #repeated time stamp: overwrite the previous sample
            trace[-1] = (t,v)
            return
        if self.compressThreshold is None:
            trace.append((t,v))
        else:
            if len(trace) < 2:
                trace.append((t,v))
            else:
                pprev = trace[-2]
                prev = trace[-1]
                assert prev > pprev,"Added two items with the same time?"
                assert t > prev[0]
                slope_old = (prev[1]-pprev[1])/(prev[0]-pprev[0])
                slope_new = (v-prev[1])/(t-prev[0])
                #bug fix: the original unparenthesized expression
                #  slope_old > 0 != slope_new > 0
                #parsed as a chained comparison and did not test for a sign
                #change; the parenthesized form below is the intended test
                if ((slope_old > 0) != (slope_new > 0)) or abs(slope_old-slope_new) > self.compressThreshold:
                    trace.append((t,v))
                else:
                    #near-linear, just extend along straight line
                    trace[-1] = (t,v)
class VisPlot:
    """An on-screen plot of one or more scalar channels, with optional event
    markers and optional streaming output to a .csv or .traj file.

    Attributes:
    - items: a list of VisPlotItem channels being plotted.
    - colors: per-item base colors, generated lazily in render().
    - events: map from event name to a deque of event times.
    - eventColors: map from event name to an RGBA color.
    - outfile / outformat: open output file object and its extension ('.csv'
      or '.traj') while saving, or None otherwise.
    """
    def __init__(self):
        self.items = []
        self.colors = []
        self.events = dict()
        self.eventColors = dict()
        self.outfile = None
        self.outformat = None
    def __del__(self):
        self.endSave()
    def update(self,t,duration,compressThreshold):
        """Samples all channels at time t, dumps a row to the output file if
        saving, and discards history older than the display window (`duration`
        when saving, 60s otherwise)."""
        for i in self.items:
            i.compressThreshold = compressThreshold
            i.update(t)
        if self.outfile:
            self.dumpCurrent()
            self.discard(t-duration)
        else:
            self.discard(t-60.0)
    def discard(self,tmin):
        """Discards all samples and event markers older than tmin; events with
        no remaining times are removed entirely."""
        for i in self.items:
            i.discard(tmin)
        delevents = []
        for e,times in self.events.iteritems():
            while len(times) > 0 and times[0] < tmin:
                times.popleft()
            if len(times)==0:
                delevents.append(e)
        for e in delevents:
            del self.events[e]
    def addEvent(self,name,t,color=None):
        """Registers an event marker named `name` at time t.  When color is
        None, a random saturated color is generated."""
        if name in self.events:
            self.events[name].append(t)
        else:
            from collections import deque
            self.events[name] = deque([t])
        if color == None:
            import random
            color = (random.uniform(0.01,1),random.uniform(0.01,1),random.uniform(0.01,1))
            color = vectorops.mul(color,1.0/max(color))
        if color != None:
            if len(color)==3:
                #pad RGB to RGBA.  Fix: build a new list rather than doing
                #'+= [1.0]' on the stored reference, which raised TypeError
                #for tuple colors and mutated the caller's list
                color = list(color)+[1.0]
            self.eventColors[name] = color
    def autoRange(self):
        """Returns a (vmin,vmax) value range covering all visible traces,
        defaulting to (0,1) when there is no data."""
        vmin = float('inf')
        vmax = -float('inf')
        for i in self.items:
            for j in xrange(len(i.traceRanges)):
                if not i.hidden[j]:
                    vmin = min(vmin,i.traceRanges[j][0])
                    vmax = max(vmax,i.traceRanges[j][1])
        if math.isinf(vmin):
            return (0.,1.)
        if vmax == vmin:
            vmax += 1.0
        return (float(vmin),float(vmax))
    def render(self,window,x,y,w,h,duration,vmin=None,vmax=None):
        """Draws the plot in the rectangle (x,y,w,h) of the given window,
        showing the last `duration` seconds.  vmin/vmax fix the value range;
        when vmin is None the range is auto-fit to the data."""
        if vmin == None:
            vmin,vmax = self.autoRange()
        import random
        #lazily assign a random saturated color to each item
        while len(self.colors) < len(self.items):
            c = (random.uniform(0.01,1),random.uniform(0.01,1),random.uniform(0.01,1))
            c = vectorops.mul(c,1.0/max(c))
            self.colors.append(c)
        #plot border
        glColor3f(0,0,0)
        glBegin(GL_LINE_LOOP)
        glVertex2f(x,y)
        glVertex2f(x+w,y)
        glVertex2f(x+w,y+h)
        glVertex2f(x,y+h)
        glEnd()
        window.draw_text((x-18,y+4),'%.2f'%(vmax,),9)
        window.draw_text((x-18,y+h+4),'%.2f'%(vmin,),9)
        #latest time across all traces defines the right edge of the window
        tmax = 0
        for i in self.items:
            for trace in i.traces:
                if len(trace)==0: continue
                tmax = max(tmax,trace[-1][0])
        for i,item in enumerate(self.items):
            for j,trace in enumerate(item.traces):
                if len(trace)==0: continue
                labelheight = trace[-1][1]
                if len(item.name)==0:
                    label = item.itemnames[j]
                else:
                    label = str(item.name) + '.' + item.itemnames[j]
                labelheight = (labelheight - vmin)/(vmax-vmin)
                labelheight = y + h - h*labelheight
                glColor3fv(vectorops.mul(self.colors[i],item.luminosity[j]))
                window.draw_text((x+w+3,labelheight+4),label,9)
                glBegin(GL_LINE_STRIP)
                for k in xrange(len(trace)-1):
                    if trace[k+1][0] > tmax-duration:
                        u,v = trace[k]
                        if trace[k][0] < tmax-duration:
                            #interpolate so x is at tmax-duration
                            u2,v2 = trace[k+1]
                            #u + s(u2-u) = tmax-duration
                            s = (tmax-duration-u)/(u2-u)
                            v = v + s*(v2-v)
                            u = (tmax-duration)
                        u = (u-(tmax-duration))/duration
                        v = (v-vmin)/(vmax-vmin)
                        glVertex2f(x+w*u,y+(1-v)*h)
                u,v = trace[-1]
                u = (u-(tmax-duration))/duration
                v = (v-vmin)/(vmax-vmin)
                glVertex2f(x+w*u,y+(1-v)*h)
                glEnd()
        if len(self.events) > 0:
            for e,times in self.events.iteritems():
                for t in times:
                    if t < tmax-duration: continue
                    labelx = (t - (tmax-duration))/duration
                    labelx = x + w*labelx
                    c = self.eventColors[e]
                    glColor4f(c[0]*0.5,c[1]*0.5,c[2]*0.5,c[3])
                    window.draw_text((labelx,y+h+12),e,9)
            glEnable(GL_BLEND)
            glBlendFunc(GL_SRC_ALPHA,GL_ONE_MINUS_SRC_ALPHA)
            glBegin(GL_LINES)
            for e,times in self.events.iteritems():
                #bug fix: look up this event's color; previously the stale
                #color from the last iteration of the loop above was used
                c = self.eventColors[e]
                for t in times:
                    if t < tmax-duration: continue
                    labelx = (t - (tmax-duration))/duration
                    labelx = x + w*labelx
                    glColor4f(c[0],c[1],c[2],c[3]*0.5)
                    glVertex2f(labelx,y)
                    glVertex2f(labelx,y+h)
            glEnd()
            glDisable(GL_BLEND)
    def beginSave(self,fn):
        """Opens fn for streaming output and dumps the current history.  The
        format is taken from the extension (.csv writes a header row; .traj
        writes whitespace-separated records).

        Raises:
            ValueError: if the extension is not .csv or .traj.
        """
        import os
        ext = os.path.splitext(fn)[1]
        if ext == '.csv' or ext == '.traj':
            self.outformat = ext
        else:
            raise ValueError("Invalid extension for visualization plot, can only accept .csv or .traj")
        self.outfile = open(fn,'w')
        if self.outformat == '.csv':
            #output a header
            self.outfile.write("time")
            for i in self.items:
                self.outfile.write(",")
                fullitemnames = []
                if len(i.name) != 0:
                    name = None
                    if isinstance(i.name,(list,tuple)):
                        name = '.'.join(v for v in i.name)
                    else:
                        name = i.name
                    fullitemnames = [name+'.'+itemname for itemname in i.itemnames]
                else:
                    fullitemnames = i.itemnames
                self.outfile.write(",".join(fullitemnames))
            self.outfile.write("\n")
        self.dumpAll()
    def endSave(self):
        """Closes the output file, if any.  Safe to call multiple times."""
        if self.outfile is not None:
            self.outfile.close()
            #prevent further dumps / double closes on an already-closed file
            self.outfile = None
    def dumpAll(self):
        """Writes the entire history of all channels to the output file,
        resampling every trace onto a common uniform time grid."""
        assert self.outfile is not None
        if len(self.items) == 0: return
        cols = []
        mindt = float('inf')
        mint = float('inf')
        maxt = -float('inf')
        for i in self.items:
            if len(i.traces) == 0:
                continue
            for j,trace in enumerate(i.traces):
                times,vals = zip(*trace)
                if isinstance(vals[0],(int,float)):
                    vals = [[v] for v in vals]
                traj = Trajectory(times,vals)
                cols.append(traj)
                mint = min(mint,traj.times[0])
                maxt = max(maxt,traj.times[-1])
                for k in xrange(len(traj.times)-1):
                    mindt = min(mindt,traj.times[k+1] - traj.times[k])
        assert mindt > 0, "For some reason, there is a duplicate time?"
        #guard against N==0 (e.g., single-sample traces), which previously
        #caused a division by zero
        N = max(1,int((maxt - mint)/mindt))
        times = [mint + i*(maxt-mint)/N for i in range(N+1)]
        for i in xrange(N+1):
            vals = [col.eval(times[i]) for col in cols]
            if self.outformat == '.csv':
                self.outfile.write(str(times[i])+',')
                self.outfile.write(','.join([str(v[0]) for v in vals]))
                self.outfile.write('\n')
            else:
                self.outfile.write(str(times[i])+'\t')
                self.outfile.write(str(len(vals))+' ')
                self.outfile.write(' '.join([str(v[0]) for v in vals]))
                self.outfile.write('\n')
    def dumpCurrent(self):
        """Appends one row containing the latest value of every channel to
        the output file."""
        if len(self.items) == 0: return
        #bug fix: VisPlotItem stores its data in 'traces', not 'trace'
        assert len(self.items[0].traces) > 0, "Item has no channels?"
        assert len(self.items[0].traces[0]) > 0, "Item has no readings yet?"
        #bug fix: take the time component of the latest sample, not the
        #whole (time,value) tuple
        t = self.items[0].traces[0][-1][0]
        vals = []
        for i in self.items:
            if len(i.traces) == 0:
                continue
            for j,trace in enumerate(i.traces):
                vals.append(trace[-1][1])
        if self.outformat == '.csv':
            self.outfile.write(str(t)+',')
            self.outfile.write(','.join([str(v) for v in vals]))
            self.outfile.write('\n')
        else:
            self.outfile.write(str(t)+'\t')
            self.outfile.write(str(len(vals))+' ')
            self.outfile.write(' '.join([str(v) for v in vals]))
            self.outfile.write('\n')
class VisAppearance:
    def __init__(self,item,name = None):
        """Wraps the given item for drawing, optionally labeling it with
        `name` (shown on screen unless the 'text_hidden' attribute is set)."""
        self.name = name
        #whether draw() should skip this item entirely
        self.hidden = False
        #when False, customAppearance / the 'color' attribute override the
        #item's native appearance during draw()
        self.useDefaultAppearance = True
        self.customAppearance = None
        #optional callable taking the item; replaces all drawing when set
        self.customDrawFunc = None
        #For group items, this allows you to customize appearance of sub-items
        self.subAppearances = {}
        self.animation = None
        self.animationStartTime = 0
        self.animationSpeed = 1.0
        #free-form attribute dictionary ('color', 'size', 'length', etc.)
        self.attributes = {}
        #used for Qt text rendering
        self.widget = None
        #used for visual editing of certain items
        self.editor = None
        #cached drawing
        self.displayCache = [glcommon.CachedGLObject()]
        self.displayCache[0].name = name
        #temporary configuration of the item
        self.drawConfig = None
        self.setItem(item)
    def setItem(self,item):
        """Sets the underlying item and rebuilds the sub-appearance table for
        compound items (worlds, robots, coordinate groups, and holds)."""
        self.item = item
        self.subAppearances = {}
        #Parse out sub-items which can have their own appearance changed
        if isinstance(item,WorldModel):
            for i in xrange(item.numRobots()):
                self.subAppearances[("Robot",i)] = VisAppearance(item.robot(i),item.robot(i).getName())
            for i in xrange(item.numRigidObjects()):
                self.subAppearances[("RigidObject",i)] = VisAppearance(item.rigidObject(i),item.rigidObject(i).getName())
            for i in xrange(item.numTerrains()):
                self.subAppearances[("Terrain",i)] = VisAppearance(item.terrain(i),item.terrain(i).getName())
        elif isinstance(item,RobotModel):
            for i in xrange(item.numLinks()):
                self.subAppearances[("Link",i)] = VisAppearance(item.link(i),item.link(i).getName())
        elif isinstance(item,coordinates.Group):
            for n,f in item.frames.iteritems():
                self.subAppearances[("Frame",n)] = VisAppearance(f,n)
            for n,p in item.points.iteritems():
                self.subAppearances[("Point",n)] = VisAppearance(p,n)
            for n,d in item.directions.iteritems():
                self.subAppearances[("Direction",n)] = VisAppearance(d,n)
            for n,g in item.subgroups.iteritems():
                self.subAppearances[("Subgroup",n)] = VisAppearance(g,n)
        elif isinstance(item,Hold):
            if item.ikConstraint is not None:
                self.subAppearances["ikConstraint"] = VisAppearance(item.ikConstraint,"ik")
            for n,c in enumerate(item.contacts):
                self.subAppearances[("contact",n)] = VisAppearance(c,n)
        #sub-appearances share this appearance's attribute dictionary (same
        #dict object, so later attribute edits propagate to them)
        for (k,a) in self.subAppearances.iteritems():
            a.attributes = self.attributes
def markChanged(self):
for c in self.displayCache:
c.markChanged()
for (k,a) in self.subAppearances.iteritems():
a.markChanged()
self.update_editor(True)
self.doRefresh = True
def destroy(self):
for c in self.displayCache:
c.destroy()
for (k,a) in self.subAppearances.iteritems():
a.destroy()
self.subAppearances = {}
def drawText(self,text,point):
"""Draws the given text at the given point"""
if self.attributes.get("text_hidden",False): return
self.widget.addLabel(text,point[:],[0,0,0])
def updateAnimation(self,t):
"""Updates the configuration, if it's being animated"""
if not self.animation:
self.drawConfig = None
else:
u = self.animationSpeed*(t-self.animationStartTime)
q = self.animation.eval(u,self.animationEndBehavior)
self.drawConfig = q
for n,app in self.subAppearances.iteritems():
app.updateAnimation(t)
def updateTime(self,t):
"""Updates in real time"""
if isinstance(self.item,VisPlot):
compressThreshold = self.attributes.get('compress',_defaultCompressThreshold)
duration = self.attributes.get('duration',5.)
for items in self.item.items:
if items.linkitem:
items.linkitem.swapDrawConfig()
self.item.update(t,duration,compressThreshold)
for items in self.item.items:
if items.linkitem:
items.linkitem.swapDrawConfig()
def swapDrawConfig(self):
"""Given self.drawConfig!=None, swaps out the item's curren
configuration with self.drawConfig. Used for animations"""
if self.drawConfig:
try:
newDrawConfig = config.getConfig(self.item)
#self.item =
config.setConfig(self.item,self.drawConfig)
self.drawConfig = newDrawConfig
except Exception as e:
print "Warning, exception thrown during animation update. Probably have incorrect length of configuration"
import traceback
traceback.print_exc()
pass
for n,app in self.subAppearances.iteritems():
app.swapDrawConfig()
def clearDisplayLists(self):
if isinstance(self.item,WorldModel):
for r in range(self.item.numRobots()):
for link in range(self.item.robot(r).numLinks()):
self.item.robot(r).link(link).appearance().refresh()
for i in range(self.item.numRigidObjects()):
self.item.rigidObject(i).appearance().refresh()
for i in range(self.item.numTerrains()):
self.item.terrain(i).appearance().refresh()
elif hasattr(self.item,'appearance'):
self.item.appearance().refresh()
elif isinstance(self.item,RobotModel):
for link in range(self.item.numLinks()):
self.item.link(link).appearance().refresh()
for n,o in self.subAppearances.iteritems():
o.clearDisplayLists()
self.markChanged()
    def draw(self,world=None):
        """Draws the item using OpenGL.

        If self.name is set (and the 'text_hidden' attribute is not), the
        item's name is drawn as a label near the item.  Appearance overrides
        (customAppearance, or the 'color' attribute) are applied for the
        duration of the call and reverted at the end.  When customDrawFunc is
        set, it replaces all drawing.

        Args:
            world: an optional WorldModel used to resolve items (Config,
                Configs, RobotTrajectory, IKGoal) that can only be drawn
                relative to a robot.
        """
        if self.hidden: return
        if self.customDrawFunc is not None:
            self.customDrawFunc(self.item)
            return
        item = self.item
        name = self.name
        #set appearance
        if not self.useDefaultAppearance and hasattr(item,'appearance'):
            #stash the native appearance so it can be restored at the end
            if not hasattr(self,'oldAppearance'):
                self.oldAppearance = item.appearance().clone()
            if self.customAppearance != None:
                #print "Changing appearance of",name
                item.appearance().set(self.customAppearance)
            elif "color" in self.attributes:
                #print "Changing color of",name
                item.appearance().setColor(*self.attributes["color"])
        #compound items defer entirely to their sub-appearances
        if len(self.subAppearances)!=0:
            for n,app in self.subAppearances.iteritems():
                app.widget = self.widget
                app.draw(world)
        elif hasattr(item,'drawGL'):
            item.drawGL()
        elif hasattr(item,'drawWorldGL'):
            item.drawWorldGL()
        elif isinstance(item,str):
            #text item: only drawn here when it has a 3D world position;
            #2D/console text is handled elsewhere
            pos = self.attributes.get("position",None)
            if pos is not None and len(pos)==3:
                col = self.attributes.get("color",(0,0,0))
                self.widget.addLabel(self.item,pos,col)
        elif isinstance(item,VisPlot):
            pass
        elif isinstance(item,Trajectory):
            #determine whether the trajectory can be drawn as a 3D curve and
            #where to put its label
            doDraw = False
            centroid = None
            if isinstance(item,RobotTrajectory):
                ees = self.attributes.get("endeffectors",[-1])
                if world:
                    doDraw = (len(ees) > 0)
                    robot = world.robot(0)
                    for i,ee in enumerate(ees):
                        if ee < 0: ees[i] = robot.numLinks()-1
                    if doDraw:
                        robot.setConfig(item.milestones[0])
                        centroid = vectorops.div(vectorops.add(*[robot.link(ee).getTransform()[1] for ee in ees]),len(ees))
            elif isinstance(item,SE3Trajectory):
                doDraw = True
                centroid = item.milestones[0][9:]
            else:
                if len(item.milestones[0]) == 3:
                    #R3 trajectory
                    doDraw = True
                    centroid = item.milestones[0]
                elif len(item.milestones[0]) == 2:
                    #R2 trajectory
                    doDraw = True
                    centroid = item.milestones[0]+[0.0]
            if doDraw:
                def drawRaw():
                    #convert the trajectory milestones into one or more lists
                    #of 3D points, then draw each as a polyline
                    pointTrajectories = []
                    if isinstance(item,RobotTrajectory):
                        robot = world.robot(0)
                        ees = self.attributes.get("endeffectors",[-1])
                        for i,ee in enumerate(ees):
                            if ee < 0: ees[i] = robot.numLinks()-1
                        if world:
                            for ee in ees:
                                pointTrajectories.append([])
                            for m in item.milestones:
                                robot.setConfig(m)
                                for ee,eetraj in zip(ees,pointTrajectories):
                                    eetraj.append(robot.link(ee).getTransform()[1])
                    elif isinstance(item,SE3Trajectory):
                        pointTrajectories.append([])
                        for m in item.milestones:
                            pointTrajectories[-1].append(m[9:])
                    else:
                        if len(item.milestones[0]) == 3:
                            #R3 trajectory
                            pointTrajectories.append(item.milestones)
                        elif len(item.milestones[0]) == 2:
                            #R2 trajectory
                            pointTrajectories.append([v + [0.0] for v in item.milestones])
                    glDisable(GL_LIGHTING)
                    glLineWidth(self.attributes.get("width",3))
                    glColor4f(*self.attributes.get("color",[1,0.5,0,1]))
                    for traj in pointTrajectories:
                        if len(traj) == 1:
                            glBegin(GL_POINTS)
                            glVertex3f(*traj[0])
                            glEnd()
                        if len(traj) >= 2:
                            glBegin(GL_LINE_STRIP)
                            for p in traj:
                                glVertex3f(*p)
                            glEnd()
                    glLineWidth(1.0)
                self.displayCache[0].draw(drawRaw,se3.identity())
                if name != None:
                    self.drawText(name,centroid)
        elif isinstance(item,coordinates.Point):
            def drawRaw():
                glDisable(GL_LIGHTING)
                glEnable(GL_POINT_SMOOTH)
                glPointSize(self.attributes.get("size",5.0))
                glColor4f(*self.attributes.get("color",[0,0,0,1]))
                glBegin(GL_POINTS)
                glVertex3f(0,0,0)
                glEnd()
                #write name
            glDisable(GL_DEPTH_TEST)
            self.displayCache[0].draw(drawRaw,[so3.identity(),item.worldCoordinates()])
            glEnable(GL_DEPTH_TEST)
            if name != None:
                self.drawText(name,item.worldCoordinates())
        elif isinstance(item,coordinates.Direction):
            def drawRaw():
                glDisable(GL_LIGHTING)
                glDisable(GL_DEPTH_TEST)
                L = self.attributes.get("length",0.15)
                source = [0,0,0]
                glColor4f(*self.attributes.get("color",[0,1,1,1]))
                glBegin(GL_LINES)
                glVertex3f(*source)
                glVertex3f(*vectorops.mul(item.localCoordinates(),L))
                glEnd()
                glEnable(GL_DEPTH_TEST)
                #write name
            self.displayCache[0].draw(drawRaw,item.frame().worldCoordinates(),parameters = item.localCoordinates())
            if name != None:
                self.drawText(name,vectorops.add(item.frame().worldCoordinates()[1],item.worldCoordinates()))
        elif isinstance(item,coordinates.Frame):
            t = item.worldCoordinates()
            if item.parent() != None:
                tp = item.parent().worldCoordinates()
            else:
                tp = se3.identity()
            tlocal = item.relativeCoordinates()
            def drawRaw():
                glDisable(GL_DEPTH_TEST)
                glDisable(GL_LIGHTING)
                glLineWidth(2.0)
                gldraw.xform_widget(tlocal,self.attributes.get("length",0.1),self.attributes.get("width",0.01))
                glLineWidth(1.0)
                #draw curve between frame and parent
                if item.parent() != None:
                    d = vectorops.norm(tlocal[1])
                    vlen = d*0.5
                    v1 = so3.apply(tlocal[0],[-vlen]*3)
                    v2 = [vlen]*3
                    #glEnable(GL_BLEND)
                    #glBlendFunc(GL_SRC_ALPHA,GL_ONE_MINUS_SRC_ALPHA)
                    #glColor4f(1,1,0,0.5)
                    glColor3f(1,1,0)
                    gldraw.hermite_curve(tlocal[1],v1,[0,0,0],v2,0.03*max(0.1,vectorops.norm(tlocal[1])))
                    #glDisable(GL_BLEND)
                glEnable(GL_DEPTH_TEST)
            #For some reason, cached drawing is causing OpenGL problems
            #when the frame is rapidly changing
            self.displayCache[0].draw(drawRaw,transform=tp, parameters = tlocal)
            #glPushMatrix()
            #glMultMatrixf(sum(zip(*se3.homogeneous(tp)),()))
            #drawRaw()
            #glPopMatrix()
            #write name
            if name != None:
                self.drawText(name,t[1])
        elif isinstance(item,coordinates.Transform):
            #draw curve between frames
            t1 = item.source().worldCoordinates()
            if item.destination() != None:
                t2 = item.destination().worldCoordinates()
            else:
                t2 = se3.identity()
            d = vectorops.distance(t1[1],t2[1])
            vlen = d*0.5
            v1 = so3.apply(t1[0],[-vlen]*3)
            v2 = so3.apply(t2[0],[vlen]*3)
            def drawRaw():
                glDisable(GL_DEPTH_TEST)
                glDisable(GL_LIGHTING)
                glColor3f(1,1,1)
                gldraw.hermite_curve(t1[1],v1,t2[1],v2,0.03)
                glEnable(GL_DEPTH_TEST)
                #write name at curve
            self.displayCache[0].draw(drawRaw,transform=None,parameters = (t1,t2))
            if name != None:
                self.drawText(name,spline.hermite_eval(t1[1],v1,t2[1],v2,0.5))
        elif isinstance(item,coordinates.Group):
            pass
        elif isinstance(item,ContactPoint):
            #contact drawn as a point plus a short line along the normal
            def drawRaw():
                glDisable(GL_LIGHTING)
                glEnable(GL_POINT_SMOOTH)
                glPointSize(self.attributes.get("size",5.0))
                l = self.attributes.get("length",0.05)
                glColor4f(*self.attributes.get("color",[1,0.5,0,1]))
                glBegin(GL_POINTS)
                glVertex3f(0,0,0)
                glEnd()
                glBegin(GL_LINES)
                glVertex3f(0,0,0)
                glVertex3f(l,0,0)
                glEnd()
            self.displayCache[0].draw(drawRaw,[so3.canonical(item.n),item.x])
        elif isinstance(item,Hold):
            pass
        else:
            #fall back to type-based drawing for raw lists/tuples
            try:
                itypes = objectToVisType(item,world)
            except:
                print "visualization.py: Unsupported object type",item,"of type:",item.__class__.__name__
                return
            if itypes == None:
                print "Unable to draw item",item,"to drawable"
                return
            elif itypes == 'Config':
                if world:
                    robot = world.robot(0)
                    if not self.useDefaultAppearance:
                        oldAppearance = [robot.link(i).appearance().clone() for i in xrange(robot.numLinks())]
                        for i in xrange(robot.numLinks()):
                            if self.customAppearance is not None:
                                robot.link(i).appearance().set(self.customAppearance)
                            elif "color" in self.attributes:
                                robot.link(i).appearance().setColor(*self.attributes["color"])
                    oldconfig = robot.getConfig()
                    robot.setConfig(item)
                    robot.drawGL()
                    robot.setConfig(oldconfig)
                    if not self.useDefaultAppearance:
                        for (i,app) in enumerate(oldAppearance):
                            robot.link(i).appearance().set(app)
                else:
                    print "Unable to draw Config tiems without a world"
            elif itypes == 'Configs':
                if world:
                    #draw at most maxConfigs evenly-spaced configurations
                    maxConfigs = self.attributes.get("maxConfigs",min(10,len(item)))
                    robot = world.robot(0)
                    if not self.useDefaultAppearance:
                        oldAppearance = [robot.link(i).appearance().clone() for i in xrange(robot.numLinks())]
                        for i in xrange(robot.numLinks()):
                            if self.customAppearance is not None:
                                robot.link(i).appearance().set(self.customAppearance)
                            elif "color" in self.attributes:
                                robot.link(i).appearance().setColor(*self.attributes["color"])
                    oldconfig = robot.getConfig()
                    for i in xrange(maxConfigs):
                        idx = int(i*len(item))/maxConfigs
                        robot.setConfig(item[idx])
                        robot.drawGL()
                    robot.setConfig(oldconfig)
                    if not self.useDefaultAppearance:
                        for (i,app) in enumerate(oldAppearance):
                            robot.link(i).appearance().set(app)
                else:
                    print "Unable to draw Configs items without a world"
            elif itypes == 'Vector3':
                def drawRaw():
                    glDisable(GL_LIGHTING)
                    glEnable(GL_POINT_SMOOTH)
                    glPointSize(self.attributes.get("size",5.0))
                    glColor4f(*self.attributes.get("color",[0,0,0,1]))
                    glBegin(GL_POINTS)
                    glVertex3f(0,0,0)
                    glEnd()
                self.displayCache[0].draw(drawRaw,[so3.identity(),item])
                if name != None:
                    self.drawText(name,item)
            elif itypes == 'RigidTransform':
                def drawRaw():
                    fancy = self.attributes.get("fancy",False)
                    if fancy: glEnable(GL_LIGHTING)
                    else: glDisable(GL_LIGHTING)
                    gldraw.xform_widget(se3.identity(),self.attributes.get("length",0.1),self.attributes.get("width",0.01),fancy=fancy)
                self.displayCache[0].draw(drawRaw,transform=item)
                if name != None:
                    self.drawText(name,item[1])
            elif itypes == 'IKGoal':
                if hasattr(item,'robot'):
                    #need this to be built with a robot element.
                    #Otherwise, can't determine the correct transforms
                    robot = item.robot
                elif world:
                    if world.numRobots() >= 1:
                        robot = world.robot(0)
                    else:
                        robot = None
                else:
                    robot = None
                if robot != None:
                    link = robot.link(item.link())
                    dest = robot.link(item.destLink()) if item.destLink()>=0 else None
                    while len(self.displayCache) < 3:
                        self.displayCache.append(glcommon.CachedGLObject())
                    self.displayCache[1].name = self.name+" target position"
                    self.displayCache[2].name = self.name+" curve"
                    if item.numPosDims() != 0:
                        lp,wp = item.getPosition()
                        #set up parameters of connector
                        p1 = se3.apply(link.getTransform(),lp)
                        if dest != None:
                            p2 = se3.apply(dest.getTransform(),wp)
                        else:
                            p2 = wp
                        d = vectorops.distance(p1,p2)
                        v1 = [0.0]*3
                        v2 = [0.0]*3
                        if item.numRotDims()==3: #full constraint
                            R = item.getRotation()
                            def drawRaw():
                                gldraw.xform_widget(se3.identity(),self.attributes.get("length",0.1),self.attributes.get("width",0.01))
                            t1 = se3.mul(link.getTransform(),(so3.identity(),lp))
                            t2 = (R,wp) if dest==None else se3.mul(dest.getTransform(),(R,wp))
                            self.displayCache[0].draw(drawRaw,transform=t1)
                            self.displayCache[1].draw(drawRaw,transform=t2)
                            vlen = d*0.1
                            v1 = so3.apply(t1[0],[-vlen]*3)
                            v2 = so3.apply(t2[0],[vlen]*3)
                        elif item.numRotDims()==0: #point constraint
                            def drawRaw():
                                glDisable(GL_LIGHTING)
                                glEnable(GL_POINT_SMOOTH)
                                glPointSize(self.attributes.get("size",5.0))
                                glColor4f(*self.attributes.get("color",[0,0,0,1]))
                                glBegin(GL_POINTS)
                                glVertex3f(0,0,0)
                                glEnd()
                            self.displayCache[0].draw(drawRaw,transform=(so3.identity(),p1))
                            self.displayCache[1].draw(drawRaw,transform=(so3.identity(),p2))
                            #set up the connecting curve
                            vlen = d*0.5
                            d = vectorops.sub(p2,p1)
                            v1 = vectorops.mul(d,0.5)
                            #curve in the destination
                            v2 = vectorops.cross((0,0,0.5),d)
                        else: #hinge constraint
                            p = [0,0,0]
                            d = [0,0,0]
                            def drawRawLine():
                                glDisable(GL_LIGHTING)
                                glEnable(GL_POINT_SMOOTH)
                                glPointSize(self.attributes.get("size",5.0))
                                glColor4f(*self.attributes.get("color",[0,0,0,1]))
                                glBegin(GL_POINTS)
                                glVertex3f(*p)
                                glEnd()
                                glColor4f(*self.attributes.get("color",[0.5,0,0.5,1]))
                                glLineWidth(self.attributes.get("width",3.0))
                                glBegin(GL_LINES)
                                glVertex3f(*p)
                                glVertex3f(*vectorops.madd(p,d,self.attributes.get("length",0.1)))
                                glEnd()
                                glLineWidth(1.0)
                            ld,wd = item.getRotationAxis()
                            p = lp
                            d = ld
                            self.displayCache[0].draw(drawRawLine,transform=link.getTransform(),parameters=(p,d))
                            p = wp
                            d = wd
                            self.displayCache[1].draw(drawRawLine,transform=dest.getTransform() if dest else se3.identity(),parameters=(p,d))
                            #set up the connecting curve
                            d = vectorops.sub(p2,p1)
                            v1 = vectorops.mul(d,0.5)
                            #curve in the destination
                            v2 = vectorops.cross((0,0,0.5),d)
                        def drawConnection():
                            glDisable(GL_LIGHTING)
                            glDisable(GL_DEPTH_TEST)
                            glColor3f(1,0.5,0)
                            gldraw.hermite_curve(p1,v1,p2,v2,0.03*max(0.1,vectorops.distance(p1,p2)))
                            #glBegin(GL_LINES)
                            #glVertex3f(*p1)
                            #glVertex3f(*p2)
                            #glEnd()
                            glEnable(GL_DEPTH_TEST)
                        #TEMP for some reason the cached version sometimes gives a GL error
                        self.displayCache[2].draw(drawConnection,transform=None,parameters = (p1,v1,p2,v2))
                        #drawConnection()
                        if name != None:
                            self.drawText(name,wp)
                    else:
                        wp = link.getTransform()[1]
                        if item.numRotDims()==3: #full constraint
                            R = item.getRotation()
                            def drawRaw():
                                gldraw.xform_widget(se3.identity(),self.attributes.get("length",0.1),self.attributes.get("width",0.01))
                            self.displayCache[0].draw(drawRaw,transform=link.getTransform())
                            self.displayCache[1].draw(drawRaw,transform=se3.mul(link.getTransform(),(R,[0,0,0])))
                        elif item.numRotDims() > 0:
                            #axis constraint
                            d = [0,0,0]
                            def drawRawLine():
                                glDisable(GL_LIGHTING)
                                glColor4f(*self.attributes.get("color",[0.5,0,0.5,1]))
                                glLineWidth(self.attributes.get("width",3.0))
                                glBegin(GL_LINES)
                                glVertex3f(0,0,0)
                                glVertex3f(*vectorops.mul(d,self.attributes.get("length",0.1)))
                                glEnd()
                                glLineWidth(1.0)
                            ld,wd = item.getRotationAxis()
                            d = ld
                            self.displayCache[0].draw(drawRawLine,transform=link.getTransform(),parameters=d)
                            d = wd
                            self.displayCache[1].draw(drawRawLine,transform=(dest.getTransform()[0] if dest else so3.identity(),wp),parameters=d)
                        else:
                            #no drawing
                            pass
                        if name != None:
                            self.drawText(name,wp)
            else:
                print "Unable to draw item of type",itypes
        #revert appearance
        if not self.useDefaultAppearance and hasattr(item,'appearance'):
            item.appearance().set(self.oldAppearance)
    def getBounds(self):
        """Returns a bounding box (bmin,bmax) for the item.

        Compound items return the union of their sub-item bounds.  Items for
        which no bound can be determined produce the empty bound
        aabb_create() (a message is printed); None is never returned.
        """
        if len(self.subAppearances)!=0:
            #union of sub-item bounds, starting from the empty bound
            bb = aabb_create()
            for n,app in self.subAppearances.iteritems():
                bb = aabb_expand(bb,app.getBounds())
            return bb
        item = self.item
        if isinstance(item,coordinates.Point):
            return [item.worldCoordinates(),item.worldCoordinates()]
        elif isinstance(item,coordinates.Direction):
            #bound covers the drawn arrow from the frame origin to its tip
            T = item.frame().worldCoordinates()
            d = item.localCoordinates()
            L = self.attributes.get("length",0.1)
            return aabb_create(T[1],se3.apply(T,vectorops.mul(d,L)))
        elif isinstance(item,coordinates.Frame):
            #bound covers the origin plus the tips of the three drawn axes
            T = item.worldCoordinates()
            L = self.attributes.get("length",0.1)
            return aabb_create(T[1],se3.apply(T,(L,0,0)),se3.apply(T,(0,L,0)),se3.apply(T,(0,0,L)))
        elif isinstance(item,ContactPoint):
            L = self.attributes.get("length",0.05)
            return aabb_create(item.x,vectorops.madd(item.x,item.n,L))
        elif isinstance(item,WorldModel):
            pass
        elif hasattr(item,'geometry'):
            return item.geometry().getBB()
        elif isinstance(item,(str,VisPlot)):
            pass
        else:
            try:
                vtype = objectToVisType(item,None)
                if 'Vector3' == vtype:
                    #assumed to be a point
                    return (item,item)
                elif 'RigidTransform' == vtype:
                    #assumed to be a rigid transform
                    return (item[1],item[1])
            except Exception:
                pass
        #fallthrough: no bound could be determined
        print "Empty bound for object",self.name,"type",self.item.__class__.__name__
        return aabb_create()
def getSubItem(self,path):
if len(path) == 0: return self
for k,v in self.subAppearances.iteritems():
if v.name == path[0]:
try:
return v.getSubItem(path[1:])
except ValueError,e:
raise ValueError("Invalid sub-path specified "+str(path)+" at "+str(e))
raise ValueError("Invalid sub-item specified "+path[0])
    def make_editor(self):
        """Instantiates an interactive poser widget appropriate to the item's
        type and stores it in self.editor.  No-op when an editor already
        exists; prints a warning and leaves self.editor unset for types with
        no editor."""
        if self.editor != None:
            return
        item = self.item
        if isinstance(item,coordinates.Point):
            res = PointPoser()
            res.set(self.item.worldCoordinates())
            res.setAxes(self.item.frame().worldCoordinates()[0])
        elif isinstance(item,coordinates.Direction):
            res = PointPoser()
            res.set(self.item.worldCoordinates())
            res.setAxes(self.item.frame().worldCoordinates()[0])
        elif isinstance(item,coordinates.Frame):
            res = TransformPoser()
            res.set(*self.item.worldCoordinates())
        elif isinstance(self.item,RobotModel):
            res = RobotPoser(self.item)
            #hide the item itself; the poser draws the robot instead
            self.hidden = True
        elif isinstance(self.item,SubRobotModel):
            res = RobotPoser(self.item._robot)
            res.setActiveDofs(self.item.links);
            self.hidden = True
        elif isinstance(self.item,RigidObjectModel):
            res = ObjectPoser(self.item)
        elif isinstance(self.item,(list,tuple)):
            #determine if it's a rotation, transform, or point
            itype = objectToVisType(self.item,None)
            if itype == 'Vector3':
                res = PointPoser()
                res.set(self.item)
            elif itype == 'Matrix3':
                res = TransformPoser()
                res.enableRotation(True)
                res.enableTranslation(False)
                res.set(self.item)
            elif itype == 'RigidTransform':
                res = TransformPoser()
                res.enableRotation(True)
                res.enableTranslation(True)
                res.set(*self.item)
            else:
                print "VisAppearance.make_editor(): Warning, editor for object of type",itype,"not defined"
                return
        else:
            print "VisAppearance.make_editor(): Warning, editor for object of type",self.item.__class__.__name__,"not defined"
            return
        self.editor = res
    def update_editor(self,item_to_editor=False):
        """Synchronizes the item and its editor widget, recursing into
        sub-appearances.

        Args:
            item_to_editor (bool): if True, pushes the item's current value
                into the editor; if False, pulls the editor's value back
                into the item (only when the editor has focus).
        """
        for (name,item) in self.subAppearances.iteritems():
            item.update_editor(item_to_editor)
        if self.editor == None:
            return
        item = self.item
        if item_to_editor:
            if isinstance(item,coordinates.Point):
                self.editor.set(self.item.worldCoordinates())
            elif isinstance(item,coordinates.Direction):
                self.editor.set(self.item.worldCoordinates())
            elif isinstance(item,coordinates.Frame):
                self.editor.set(*self.item.worldCoordinates())
            elif isinstance(self.item,RobotModel):
                self.editor.set(self.item.getConfig())
            elif isinstance(self.item,SubRobotModel):
                self.editor.set(self.item.tofull(self.item.getConfig()))
            elif isinstance(self.item,RigidObjectModel):
                self.editor.set(*self.item.getTransform())
            elif isinstance(self.item,(list,tuple)):
                itype = objectToVisType(self.item,None)
                if itype in ('Vector3','Matrix3'):
                    self.editor.set(self.item)
                elif itype == 'RigidTransform':
                    self.editor.set(*self.item)
            else:
                raise RuntimeError("Uh... unsupported type with an editor?")
        else:
            if not self.editor.hasFocus():
                return
            if isinstance(item,coordinates.Point):
                #convert the edited world coordinates back to frame-local
                self.item._localCoordinates = se3.apply(se3.inv(self.item._frame.worldCoordinates()),self.editor.get())
            elif isinstance(item,coordinates.Direction):
                self.item._localCoordinates = se3.apply(se3.inv(self.item._frame.worldCoordinates()),self.editor.get())
            elif isinstance(item,coordinates.Frame):
                self.item._worldCoordinates = self.editor.get()
                self.item._relativeCoordinates = se3.mul(se3.inv(self.item.parent().worldCoordinates()),self.editor.get())
                #TODO: updating downstream frames?
            elif isinstance(self.item,RobotModel):
                self.item.setConfig(self.editor.getConditioned(self.item.getConfig()))
            elif isinstance(self.item,SubRobotModel):
                self.item.setConfig(self.item.fromfull(self.editor.get()))
            elif isinstance(self.item,RigidObjectModel):
                self.item.setTransform(*self.editor.get())
            elif isinstance(self.item,(tuple,list)):
                #setList copies b into a element-wise where a is a mutable
                #nested list structure of matching shape; returns False when
                #the copy could not be done in place (e.g., a is a tuple)
                def setList(a,b):
                    if isinstance(a,(list,tuple)) and isinstance(b,(list,tuple)):
                        if len(a) == len(b):
                            for i in xrange(len(a)):
                                if not setList(a[i],b[i]):
                                    if isinstance(a,list):
                                        a[i] = b[i]
                                    else:
                                        return False
                            return True
                    return False
                v = self.editor.get()
                if not setList(self.item,v):
                    self.item = v
            elif isinstance(self.item,tuple):
                #NOTE(review): unreachable — tuples are caught by the
                #(tuple,list) branch above
                print "Edited a tuple... maybe a point or an xform? can't actually edit"
                self.item = self.editor.get()
            else:
                raise RuntimeError("Uh... unsupported type with an editor?")
    def remove_editor(self):
        """Discards the editor widget and un-hides the item."""
        self.editor = None
        self.hidden = False
class VisualizationPlugin(glcommon.GLWidgetPlugin):
    """GL plugin that owns the visualization scene: a dict of named
    VisAppearance items, text labels, and the animation clock."""
    def __init__(self):
        glcommon.GLWidgetPlugin.__init__(self)
        self.items = {}           #maps item name -> VisAppearance
        self.labels = []          #(text,point,color) tuples, rebuilt each frame
        self.t = time.time()      #wall-clock time of the last idle() call
        self.startTime = self.t   #wall-clock time at construction
        self.animating = True     #whether the animation clock advances
        self.currentAnimationTime = 0
        self.doRefresh = False    #set to request a redraw
    def initialize(self):
        #keep or refresh display lists?
        #self._clearDisplayLists()
        return glcommon.GLWidgetPlugin.initialize(self)
    def addLabel(self,text,point,color):
        """Queues a text label at a 3D point for this frame's label pass."""
        self.labels.append((text,point,color))
    def display(self):
        """Draws all items, then clusters and draws the queued labels.
        Holds the global lock for the whole pass."""
        global _globalLock
        _globalLock.acquire()
        glcommon.GLWidgetPlugin.display(self)
        self.labels = []
        world = self.items.get('world',None)
        if world != None: world=world.item
        for (k,v) in self.items.iteritems():
            v.widget = self
            v.swapDrawConfig()
            v.draw(world)
            v.swapDrawConfig()
            v.widget = None #allows garbage collector to delete these objects
        #cluster label points
        pointTolerance = self.view.camera.dist*0.03
        pointHash = {}
        for (text,point,color) in self.labels:
            #bucket by quantized position so nearby labels merge
            index = tuple([int(x/pointTolerance) for x in point])
            try:
                pointHash[index][1].append((text,color))
            except KeyError:
                pointHash[index] = [point,[(text,color)]]
        for (p,items) in pointHash.itervalues():
            self._drawLabelRaw(p,*zip(*items))
        _globalLock.release()
    def display_screen(self):
        """Draws 2D overlay items: VisPlot panels and text items.  Items
        without an explicit 'position' attribute stack down from (20,20);
        negative coordinates are measured from the right/bottom edge."""
        global _globalLock
        _globalLock.acquire()
        glcommon.GLWidgetPlugin.display_screen(self)
        cx = 20
        cy = 20
        glDisable(GL_LIGHTING)
        glDisable(GL_DEPTH_TEST)
        for (k,v) in self.items.iteritems():
            if isinstance(v.item,VisPlot):
                pos = v.attributes.get('position',None)
                duration = v.attributes.get('duration',5.)
                vrange = v.attributes.get('range',(None,None))
                w,h = v.attributes.get('size',(200,150))
                if pos is None:
                    v.item.render(self.window,cx,cy,w,h,duration,vrange[0],vrange[1])
                    cy += h+18
                else:
                    x = pos[0]
                    y = pos[1]
                    if x < 0:
                        x = self.view.w + x
                    if y < 0:
                        y = self.view.h + y
                    v.item.render(self.window,x,y,w,h,duration,vrange[0],vrange[1])
        for (k,v) in self.items.iteritems():
            if isinstance(v.item,str):
                pos = v.attributes.get('position',None)
                col = v.attributes.get('color',(0,0,0))
                size = v.attributes.get('size',12)
                if pos is None:
                    #draw at console
                    self.window.draw_text((cx,cy+size),v.item,size,col)
                    cy += (size*15)/10
                elif len(pos)==2:
                    x = pos[0]
                    y = pos[1]
                    if x < 0:
                        x = self.view.w + x
                    if y < 0:
                        y = self.view.h + y
                    self.window.draw_text((x,y+size),v.item,size,col)
        glEnable(GL_DEPTH_TEST)
        _globalLock.release()
    #The following callbacks simply forward to the base GLWidgetPlugin
    #implementation while holding the global visualization lock.
    def reshapefunc(self,w,h):
        global _globalLock
        _globalLock.acquire()
        glcommon.GLWidgetPlugin.reshapefunc(self,w,h)
        _globalLock.release()
    def keyboardfunc(self,c,x,y):
        global _globalLock
        _globalLock.acquire()
        glcommon.GLWidgetPlugin.keyboardfunc(self,c,x,y)
        _globalLock.release()
    def keyboardupfunc(self,c,x,y):
        global _globalLock
        _globalLock.acquire()
        glcommon.GLWidgetPlugin.keyboardupfunc(self,c,x,y)
        _globalLock.release()
    def mousefunc(self,button,state,x,y):
        global _globalLock
        _globalLock.acquire()
        glcommon.GLWidgetPlugin.mousefunc(self,button,state,x,y)
        _globalLock.release()
    def motionfunc(self,x,y,dx,dy):
        global _globalLock
        _globalLock.acquire()
        glcommon.GLWidgetPlugin.motionfunc(self,x,y,dx,dy)
        _globalLock.release()
    def eventfunc(self,type,args=""):
        global _globalLock
        _globalLock.acquire()
        glcommon.GLWidgetPlugin.eventfunc(self,type,args)
        _globalLock.release()
    def closefunc(self):
        global _globalLock
        _globalLock.acquire()
        glcommon.GLWidgetPlugin.closefunc(self)
        _globalLock.release()
    def _drawLabelRaw(self,point,textList,colorList):
        """Draws a stack of colored text labels at a 3D point, offsetting
        each successive label downward in screen space."""
        #assert not self.makingDisplayList,"drawText must be called outside of display list"
        assert self.window != None
        for i,(text,c) in enumerate(zip(textList,colorList)):
            if i+1 < len(textList): text = text+","
            projpt = self.view.project(point,clip=False)
            if projpt[2] > self.view.clippingplanes[0]:
                #shift the next label down by a depth-scaled offset
                d = float(12)/float(self.view.w)*projpt[2]*0.7
                point = vectorops.add(point,so3.apply(so3.inv(self.view.camera.matrix()[0]),(0,-d,0)))
            glDisable(GL_LIGHTING)
            glDisable(GL_DEPTH_TEST)
            glColor3f(*c)
            self.draw_text(point,text,size=12)
            glEnable(GL_DEPTH_TEST)
    def _clearDisplayLists(self):
        """Invalidates the cached OpenGL display lists of every item."""
        for i in self.items.itervalues():
            i.clearDisplayLists()
    def idle(self):
        """Advances the animation clock (when animating) and pushes time
        updates to all items.  Returns False (no redraw requested here)."""
        global _globalLock
        _globalLock.acquire()
        oldt = self.t
        self.t = time.time()
        if self.animating:
            self.currentAnimationTime += (self.t - oldt)
            for (k,v) in self.items.iteritems():
                #do animation updates
                v.updateAnimation(self.currentAnimationTime)
        for (k,v) in self.items.iteritems():
            #do other updates
            v.updateTime(self.t-self.startTime)
        _globalLock.release()
        return False
def getItem(self,item_name):
"""Returns an VisAppearance according to the given name or path"""
if isinstance(item_name,(list,tuple)):
components = item_name
if len(components)==1:
return self.getItem(components[0])
if components[0] not in self.items:
raise ValueError("Invalid top-level item specified: "+item_name)
return self.items[components[0]].getSubItem(components[1:])
if item_name in self.items:
return self.items[item_name]
def dirty(self,item_name='all'):
"""Marks an item or everything as dirty, forcing a deep redraw."""
global _globalLock
_globalLock.acquire()
if item_name == 'all':
if (name,itemvis) in self.items.iteritems():
itemvis.markChanged()
else:
self.getItem(item_name).markChanged()
_globalLock.release()
    def clear(self):
        """Clears the visualization world"""
        global _globalLock
        _globalLock.acquire()
        for (name,itemvis) in self.items.iteritems():
            itemvis.destroy()
        self.items = {}
        _globalLock.release()
    def clearText(self):
        """Clears all text in the visualization."""
        global _globalLock
        _globalLock.acquire()
        del_items = []
        for (name,itemvis) in self.items.iteritems():
            if isinstance(itemvis.item,str):
                itemvis.destroy()
                del_items.append(name)
        #delete after iteration to avoid mutating the dict while looping
        for n in del_items:
            del self.items[n]
        _globalLock.release()
    def listItems(self,root=None,indent=0):
        """Prints out all items in the visualization world."""
        if root == None:
            for name,value in self.items.iteritems():
                self.listItems(value,indent)
        else:
            if isinstance(root,str):
                root = self.getItem(root)
            if indent > 0:
                print " "*(indent-1),
            print root.name
            for n,v in root.subAppearances.iteritems():
                self.listItems(v,indent+2)
    def add(self,name,item,keepAppearance=False):
        """Adds a named item to the visualization world. If the item already
        exists, the appearance information will be reinitialized if keepAppearance=False
        (default) or be kept if keepAppearance=True."""
        global _globalLock
        assert not isinstance(name,(list,tuple)),"Cannot add sub-path items"
        _globalLock.acquire()
        if keepAppearance and name in self.items:
            self.items[name].setItem(item)
        else:
            #need to erase prior item visualizer
            if name in self.items:
                self.items[name].destroy()
            app = VisAppearance(item,name)
            self.items[name] = app
        _globalLock.release()
        #self.refresh()
    def animate(self,name,animation,speed=1.0,endBehavior='loop'):
        """Attaches an animation to the named item.

        Args:
            animation: a Trajectory, a HermiteTrajectory (converted to a
                config trajectory), or a list of milestones (wrapped in a
                Trajectory with unit durations).
            speed (float): playback rate multiplier.
            endBehavior (str): what to do when the animation ends, e.g. 'loop'.
        """
        global _globalLock
        _globalLock.acquire()
        if hasattr(animation,'__iter__'):
            #a list of milestones -- loop through them with 1s delay
            print "visualization.animate(): Making a Trajectory with unit durations between",len(animation),"milestones"
            animation = Trajectory(range(len(animation)),animation)
        if isinstance(animation,HermiteTrajectory):
            animation = animation.configTrajectory()
        item = self.getItem(name)
        item.animation = animation
        item.animationStartTime = self.currentAnimationTime
        item.animationSpeed = speed
        item.animationEndBehavior = endBehavior
        item.markChanged()
        _globalLock.release()
    def pauseAnimation(self,paused=True):
        """Pauses (or resumes, with paused=False) the animation clock."""
        global _globalLock
        _globalLock.acquire()
        self.animating = not paused
        _globalLock.release()
    def stepAnimation(self,amount):
        """Manually advances the animation clock by `amount` seconds."""
        global _globalLock
        _globalLock.acquire()
        self.currentAnimationTime += amount
        self.doRefresh = True
        _globalLock.release()
    def animationTime(self,newtime=None):
        """Gets (and optionally sets) the current animation time."""
        global _globalLock
        #NOTE(review): "self==None" can never be true for a bound method;
        #this guard appears vestigial
        if self==None:
            print "Visualization disabled"
            return 0
        if newtime != None:
            _globalLock.acquire()
            self.currentAnimationTime = newtime
            _globalLock.release()
        return self.currentAnimationTime
    def remove(self,name):
        """Removes a top-level item from the visualization world."""
        global _globalLock
        _globalLock.acquire()
        assert name in self.items,"Can only remove top level objects from visualization, try hide() instead"
        item = self.getItem(name)
        item.destroy()
        del self.items[name]
        self.doRefresh = True
        _globalLock.release()
    def getItemConfig(self,name):
        """Returns the configuration (flat list) of the named item."""
        global _globalLock
        _globalLock.acquire()
        res = config.getConfig(self.getItem(name).item)
        _globalLock.release()
        return res
    def setItemConfig(self,name,value):
        """Sets the configuration of the named item; plain list/tuple/str
        items are replaced wholesale, others go through config.setConfig."""
        global _globalLock
        _globalLock.acquire()
        item = self.getItem(name)
        if isinstance(item.item,(list,tuple,str)):
            item.item = value
        else:
            config.setConfig(item.item,value)
        if item.editor:
            item.update_editor(item_to_editor = True)
        self.doRefresh = True
        _globalLock.release()
    def hideLabel(self,name,hidden=True):
        """Hides (or shows) the text label of the named item."""
        global _globalLock
        _globalLock.acquire()
        item = self.getItem(name)
        item.attributes["text_hidden"] = hidden
        item.markChanged()
        self.doRefresh = True
        _globalLock.release()
    def edit(self,name,doedit=True):
        """Starts (doedit=True) or stops (doedit=False) interactive editing
        of the named item, attaching/detaching its editor widget.

        Raises:
            ValueError: if no such item exists.
        """
        global _globalLock
        _globalLock.acquire()
        obj = self.getItem(name)
        if obj == None:
            _globalLock.release()
            raise ValueError("Object "+name+" does not exist in visualization")
        if doedit:
            obj.make_editor()
            if obj.editor:
                self.klamptwidgetmaster.add(obj.editor)
        else:
            if obj.editor:
                self.klamptwidgetmaster.remove(obj.editor)
                obj.remove_editor()
        self.doRefresh = True
        _globalLock.release()
    def widgetchangefunc(self,edit):
        """Called by GLWidgetPlugin on any widget change"""
        #pull edited values from every item's editor back into the item
        for name,item in self.items.iteritems():
            item.update_editor()
    def hide(self,name,hidden=True):
        """Hides (or shows) the named item without removing it."""
        global _globalLock
        _globalLock.acquire()
        self.getItem(name).hidden = hidden
        self.doRefresh = True
        _globalLock.release()
    def addPlotItem(self,plotname,itemname):
        """Adds the named visualization item as a trace in the named plot."""
        global _globalLock
        _globalLock.acquire()
        plot = self.getItem(plotname)
        assert plot != None and isinstance(plot.item,VisPlot),(plotname+" is not a valid plot")
        plot = plot.item
        for i in plot.items:
            assert i.name != itemname,(str(itemname)+" is already in the plot "+plotname)
        item = self.getItem(itemname)
        assert item != None,(str(itemname)+" is not a valid item")
        plot.items.append(VisPlotItem(itemname,item))
        _globalLock.release()
    def logPlot(self,plotname,itemname,value):
        """Logs a custom scalar `value` under `itemname` into the named plot,
        creating the plot's custom-data trace (the item with empty name) on
        first use."""
        global _globalLock
        _globalLock.acquire()
        customIndex = -1
        plot = self.getItem(plotname)
        assert plot != None and isinstance(plot.item,VisPlot),(plotname+" is not a valid plot")
        compress = plot.attributes.get('compress',_defaultCompressThreshold)
        plot = plot.item
        for i,item in enumerate(plot.items):
            if len(item.name)==0:
                customIndex = i
        if customIndex < 0:
            customIndex = len(plot.items)
            plot.items.append(VisPlotItem('',None))
        plot.items[customIndex].compressThreshold = compress
        plot.items[customIndex].customUpdate(itemname,self.t - self.startTime,value)
        _globalLock.release()
    def logPlotEvent(self,plotname,eventname,color):
        """Marks a named event at the current time in the named plot."""
        global _globalLock
        _globalLock.acquire()
        plot = self.getItem(plotname)
        assert plot != None and isinstance(plot.item,VisPlot),(plotname+" is not a valid plot")
        plot.item.addEvent(eventname,self.t-self.startTime,color)
        _globalLock.release()
def hidePlotItem(self,plotname,itemname,hidden=True):
global _globalLock
_globalLock.acquire()
plot = self.getItem(plotname)
assert plot != None and isinstance(plot.item,VisPlot),plotname+" is not a valid plot"
plot = plot.item
identified = False
if isinstance(itemname,(tuple,list)):
for i in plot.items:
if i.name == itemname[0]:
assert itemname[1] < len(i.hidden),("Invalid component index of item "+str(itemname[0]))
identified = True
i.hidden[itemname] = hidden
else:
for i in plot.items:
if i.name == itemname:
for j in xrange(len(i.hidden)):
i.hidden[j] = hidden
assert identified,("Invalid item "+str(itemname)+" specified in plot "+plotname)
self.doRefresh = True
_globalLock.release()
    def savePlot(self,plotname,fn):
        """Starts saving the named plot to file `fn`, or stops saving when
        fn is None."""
        global _globalLock
        _globalLock.acquire()
        plot = self.getItem(plotname)
        assert plot != None and isinstance(plot.item,VisPlot),plotname+" is not a valid plot"
        plot = plot.item
        if fn != None:
            plot.beginSave(fn)
        else:
            #NOTE(review): fn is None on this branch; verify that
            #VisPlot.endSave accepts/ignores its argument
            plot.endSave(fn)
        _globalLock.release()
    def setAppearance(self,name,appearance):
        """Overrides the named item's appearance with a custom one."""
        global _globalLock
        _globalLock.acquire()
        item = self.getItem(name)
        item.useDefaultAppearance = False
        item.customAppearance = appearance
        item.markChanged()
        self.doRefresh = True
        _globalLock.release()
    def setAttribute(self,name,attr,value):
        """Sets a drawing attribute of the named item; passing value=None
        deletes the attribute instead."""
        global _globalLock
        _globalLock.acquire()
        item = self.getItem(name)
        item.attributes[attr] = value
        if value==None:
            #setting then deleting guarantees the key exists before del
            del item.attributes[attr]
        item.markChanged()
        self.doRefresh = True
        _globalLock.release()
def revertAppearance(self,name):
global _globalLock
_globalLock.acquire()
item = self.getItem(name)
item.useDefaultApperance = True
item.markChanged()
self.doRefresh = True
_globalLock.release()
    def setColor(self,name,r,g,b,a=1.0):
        """Sets the RGBA color of the named item and disables its default
        appearance."""
        global _globalLock
        _globalLock.acquire()
        item = self.getItem(name)
        item.attributes["color"] = [r,g,b,a]
        item.useDefaultAppearance = False
        item.markChanged()
        self.doRefresh = True
        _globalLock.release()
    def setDrawFunc(self,name,func):
        """Sets a custom draw callback for the named item."""
        global _globalLock
        _globalLock.acquire()
        item = self.getItem(name)
        item.customDrawFunc = func
        self.doRefresh = True
        _globalLock.release()
    def autoFitCamera(self,scale=1.0):
        """Fits the camera viewport to all visible items; larger `scale`
        zooms in closer.  Best-effort: failures only print a warning."""
        vp = None
        if self.window == None:
            #not attached to a window yet; use the shared frontend's view
            global _frontend
            vp = _frontend.get_view()
        else:
            vp = self.window.get_view()
        try:
            autoFitViewport(vp,self.items.values())
            vp.camera.dist /= scale
        except Exception as e:
            print "Unable to auto-fit camera"
            print e
#module-level visualization state: the singleton scene plugin attached to
#the shared frontend, plus flags controlling the visualization thread
_vis = VisualizationPlugin()
_frontend.setPlugin(_vis)
_quit = False            #set to ask the visualization thread to exit
_thread_running = False  #True while the visualization thread is alive
if _PyQtAvailable:
    from PyQt4 import QtGui
    #Qt-specific startup: dialogs and windows are implemented by wrapping
    #the shared GL widget in a QDialog / QMainWindow
    class _MyDialog(QDialog):
        """Modal dialog hosting the GL widget of a WindowInfo, with an OK
        button; hides the GL window (rather than destroying it) on close."""
        def __init__(self,windowinfo):
            QDialog.__init__(self)
            self.windowinfo = windowinfo
            glwidget = windowinfo.glwindow
            glwidget.setMinimumSize(640,480)
            glwidget.setMaximumSize(4000,4000)
            glwidget.setSizePolicy(QSizePolicy(QSizePolicy.Maximum,QSizePolicy.Maximum))
            self.description = QLabel("Press OK to continue")
            self.description.setSizePolicy(QSizePolicy(QSizePolicy.Preferred,QSizePolicy.Fixed))
            self.layout = QVBoxLayout(self)
            self.layout.addWidget(glwidget)
            self.layout.addWidget(self.description)
            self.buttons = QDialogButtonBox(QDialogButtonBox.Ok,Qt.Horizontal, self)
            self.buttons.accepted.connect(self.accept)
            self.layout.addWidget(self.buttons)
            self.setWindowTitle(windowinfo.name)
            glwidget.name = windowinfo.name
        def accept(self):
            global _globalLock
            _globalLock.acquire()
            #hide instead of destroy so the GL context can be reused
            self.windowinfo.glwindow.hide()
            _globalLock.release()
            print "#########################################"
            print "klampt.vis: Dialog accept"
            print "#########################################"
            return QDialog.accept(self)
        def reject(self):
            global _globalLock
            _globalLock.acquire()
            self.windowinfo.glwindow.hide()
            print "#########################################"
            print "klampt.vis: Dialog reject"
            print "#########################################"
            _globalLock.release()
            return QDialog.reject(self)
class _MyWindow(QMainWindow):
def __init__(self,windowinfo):
QMainWindow.__init__(self)
self.windowinfo = windowinfo
self.glwidget = windowinfo.glwindow
self.glwidget.setMinimumSize(self.glwidget.width,self.glwidget.height)
self.glwidget.setMaximumSize(4000,4000)
self.glwidget.setSizePolicy(QSizePolicy(QSizePolicy.Maximum,QSizePolicy.Maximum))
self.setCentralWidget(self.glwidget)
self.setWindowTitle(windowinfo.name)
self.glwidget.name = windowinfo.name
self.saving_movie = False
self.movie_timer = QTimer(self)
self.movie_timer.timeout.connect(self.movie_update)
self.movie_frame = 0
self.movie_time_last = 0
self.saving_html = False
self.html_saver = None
self.html_start_time = 0
self.html_timer = QTimer(self)
self.html_timer.timeout.connect(self.html_update)
#TODO: for action-free programs, don't add this... but this has to be detected after initializeGL()?
mainMenu = self.menuBar()
fileMenu = mainMenu.addMenu('&Actions')
self.glwidget.actionMenu = fileMenu
visMenu = mainMenu.addMenu('&Visualization')
a = QtGui.QAction('Save world...', self)
a.setStatusTip('Saves world to xml file')
a.triggered.connect(self.save_world)
visMenu.addAction(a)
a = QtGui.QAction('Add to world...', self)
a.setStatusTip('Adds an item to the world')
a.triggered.connect(self.add_to_world)
visMenu.addAction(a)
a = QtGui.QAction('Save camera...', self)
a.setStatusTip('Saves camera settings')
a.triggered.connect(self.save_camera)
visMenu.addAction(a)
a = QtGui.QAction('Load camera...', self)
a.setStatusTip('Loads camera settings')
a.triggered.connect(self.load_camera)
visMenu.addAction(a)
a = QtGui.QAction('Start/stop movie output', self)
a.setShortcut('Ctrl+M')
a.setStatusTip('Starts / stops saving movie frames')
a.triggered.connect(self.toggle_movie_mode)
visMenu.addAction(a)
a = QtGui.QAction('Start/stop html output', self)
a.setShortcut('Ctrl+H')
a.setStatusTip('Starts / stops saving animation to HTML file')
a.triggered.connect(self.toggle_html_mode)
visMenu.addAction(a)
def getWorld(self):
if not hasattr(self.glwidget.program,'plugins'):
return None
for p in self.glwidget.program.plugins:
if hasattr(p,'world'):
return p.world
elif isinstance(p,VisualizationPlugin):
world = p.items.get('world',None)
if world != None: return world.item
return None
def getSimulator(self):
if not hasattr(self.glwidget.program,'plugins'):
return None
for p in self.glwidget.program.plugins:
if hasattr(p,'sim'):
return p.sim
return None
def save_camera(self):
if not hasattr(self.glwidget.program,'get_view'):
print "Program does not appear to have a camera"
return
v = self.glwidget.program.get_view()
fn = QFileDialog.getSaveFileName(caption="Viewport file (*.txt)",filter="Viewport file (*.txt);;All files (*.*)")
if fn is None:
return
f = open(str(fn),'w')
f.write("VIEWPORT\n")
f.write("FRAME %d %d %d %d\n"%(v.x,v.y,v.w,v.h))
f.write("PERSPECTIVE 1\n")
aspect = float(v.w)/float(v.h)
rfov = v.fov*math.pi/180.0
scale = 1.0/(2.0*math.tan(rfov*0.5/aspect)*aspect)
f.write("SCALE %f\n"%(scale,))
f.write("NEARPLANE %f\n"%(v.clippingplanes[0],))
f.write("FARPLANE %f\n"%(v.clippingplanes[0],))
f.write("CAMTRANSFORM ")
mat = se3.homogeneous(v.camera.matrix())
f.write(' '.join(str(v) for v in sum(mat,[])))
f.write('\n')
f.write("ORBITDIST %f\n"%(v.camera.dist,))
f.close()
def load_camera(self):
print "TODO"
def save_world(self):
w = self.getWorld()
if w is None:
print "Program does not appear to have a world"
fn = QFileDialog.getSaveFileName(caption="World file (elements will be saved to folder)",filter="World file (*.xml);;All files (*.*)")
if fn != None:
w.saveFile(str(fn))
print "Saved to",fn,"and elements were saved to a directory of the same name."
def add_to_world(self):
w = self.getWorld()
if w is None:
print "Program does not appear to have a world"
fn = QFileDialog.getOpenFileName(caption="World element",filter="Robot file (*.rob *.urdf);;Object file (*.obj);;Terrain file (*.env *.off *.obj *.stl *.wrl);;All files (*.*)")
if fn != None:
w.loadElement(str(fn))
for p in self.glwidget.program.plugins:
if isinstance(p,VisualizationPlugin):
p.getItem('world').setItem(w)
def toggle_movie_mode(self):
self.saving_movie = not self.saving_movie
if self.saving_movie:
self.movie_timer.start(33)
sim = self.getSimulator()
if sim != None:
self.movie_time_last = sim.getTime()
else:
self.movie_timer.stop()
dlg = QtGui.QInputDialog(self)
dlg.setInputMode( QtGui.QInputDialog.TextInput)
dlg.setLabelText("Command")
dlg.setTextValue('ffmpeg -y -f image2 -i image%04d.png klampt_record.mp4')
dlg.resize(500,100)
ok = dlg.exec_()
cmd = dlg.textValue()
#(cmd,ok) = QtGui.QInputDialog.getText(self,"Process with ffmpeg?","Command", text='ffmpeg -y -f image2 -i image%04d.png klampt_record.mp4')
if ok:
import os,glob
os.system(str(cmd))
print "Removing temporary files"
for fn in glob.glob('image*.png'):
os.remove(fn)
def movie_update(self):
sim = self.getSimulator()
if sim != None:
while sim.getTime() >= self.movie_time_last + 1.0/30.0:
self.glwidget.program.save_screen('image%04d.png'%(self.movie_frame))
self.movie_frame += 1
self.movie_time_last += 1.0/30.0
else:
self.glwidget.program.save_screen('image%04d.png'%(self.movie_frame))
self.movie_frame += 1
def toggle_html_mode(self):
self.saving_html = not self.saving_html
if self.saving_html:
world = self.getSimulator()
if world is None:
world = self.getWorld()
if world is None:
print "There is no world in the current plugin, can't save"
self.saving_html = False
return
fn = QFileDialog.getSaveFileName(caption="Save path HTML file to...",filter="HTML file (*.html);;All files (*.*)")
if fn is None:
self.saving_html = False
return
from ..io import html
self.html_start_time = time.time()
self.html_saver = html.HTMLSharePath(fn)
self.html_saver.dt = 0.033;
self.html_saver.start(world)
self.html_timer.start(33)
else:
self.html_saver.end()
self.html_timer.stop()
def html_update(self):
t = None
if self.html_saver.sim == None:
#t = time.time()-self.html_start_time
t = self.html_saver.last_t + 0.034
self.html_saver.animate(t)
def closeEvent(self,event):
global _globalLock
_globalLock.acquire()
self.windowinfo.glwindow.hide()
self.windowinfo.mode = 'hidden'
self.windowinfo.glwindow.idlesleep()
self.windowinfo.glwindow.setParent(None)
if self.saving_movie:
self.toggle_movie_mode()
if self.saving_html:
self.toggle_html_mode()
print "#########################################"
print "klampt.vis: Window close"
print "#########################################"
_globalLock.release()
    def _run_app_thread():
        """Main loop of the Qt visualization thread: creates GL windows on
        demand, runs dialogs, and shows/hides windows as each WindowInfo's
        mode changes.  Returns the last dialog's result."""
        global _thread_running,_vis,_widget,_window,_quit,_showdialog,_showwindow,_globalLock
        _thread_running = True
        _GLBackend.initialize("Klamp't visualization")
        res = None
        while not _quit:
            _globalLock.acquire()
            for i,w in enumerate(_windows):
                if w.glwindow == None and w.mode != 'hidden':
                    print "vis: creating GL window"
                    w.glwindow = _GLBackend.createWindow(w.name)
                    w.glwindow.setProgram(w.frontend)
                    w.glwindow.setParent(None)
                    w.glwindow.refresh()
                if w.doRefresh:
                    if w.mode != 'hidden':
                        w.glwindow.updateGL()
                    w.doRefresh = False
                if w.doReload and w.glwindow != None:
                    w.glwindow.setProgram(w.frontend)
                    if w.guidata:
                        w.guidata.setWindowTitle(w.name)
                        w.guidata.glwidget = w.glwindow
                        w.guidata.setCentralWidget(w.glwindow)
                    w.doReload = False
                if w.mode == 'dialog':
                    print "#########################################"
                    print "klampt.vis: Dialog on window",i
                    print "#########################################"
                    if w.custom_ui == None:
                        dlg = _MyDialog(w)
                    else:
                        dlg = w.custom_ui(w.glwindow)
                    #need to cache the bastards to avoid deleting the GL object. Not sure why it's being kept around.
                    #alldlgs.append(dlg)
                    #here's the crash -- above line deleted the old dialog, which for some reason kills the widget
                    if dlg != None:
                        w.glwindow.show()
                        w.glwindow.idlesleep(0)
                        w.glwindow.refresh()
                        w.glwindow.refresh()
                        #release the lock while the modal dialog runs
                        _globalLock.release()
                        res = dlg.exec_()
                        _globalLock.acquire()
                        print "#########################################"
                        print "klampt.vis: Dialog done on window",i
                        print "#########################################"
                    w.glwindow.hide()
                    w.glwindow.setParent(None)
                    w.glwindow.idlesleep()
                    w.mode = 'hidden'
                if w.mode == 'shown' and w.guidata == None:
                    print "#########################################"
                    print "klampt.vis: Making window",i
                    print "#########################################"
                    if w.custom_ui == None:
                        w.guidata = _MyWindow(w)
                    else:
                        w.guidata = w.custom_ui(w.glwindow)
                    w.glwindow.show()
                    w.glwindow.idlesleep(0)
                if w.mode == 'shown' and not w.guidata.isVisible():
                    print "#########################################"
                    print "klampt.vis: Showing window",i
                    print "#########################################"
                    w.glwindow.show()
                    w.glwindow.setParent(w.guidata)
                    w.glwindow.idlesleep(0)
                    w.guidata.show()
                if w.mode == 'hidden' and w.guidata != None:
                    if w.guidata.isVisible():
                        print "#########################################"
                        print "klampt.vis: Hiding window",i
                        print "#########################################"
                        w.glwindow.setParent(None)
                        w.glwindow.idlesleep()
                        w.glwindow.hide()
                        w.guidata.hide()
                    #prevent deleting the GL window
                    w.glwindow.setParent(None)
                    w.guidata = None
            _globalLock.release()
            _GLBackend.app.processEvents()
            time.sleep(0.001)
        print "Visualization thread closing..."
        for w in _windows:
            w.vis.clear()
            if w.glwindow:
                w.glwindow.close()
        _thread_running = False
        return res
elif _GLUTAvailable:
    #fallback UI path when PyQt is unavailable
    print "klampt.visualization: QT is not available, falling back to poorer"
    print "GLUT interface. Returning to another GLUT thread will not work"
    print "properly."
    print ""
    class GLUTHijacker(GLPluginProgram):
        """Wraps a WindowInfo's frontend in a single persistent GLUT window,
        emulating the Qt show/hide/dialog behavior by hiding the window and
        polling the WindowInfo mode from the idle loop."""
        def __init__(self,windowinfo):
            GLPluginProgram.__init__(self)
            self.windowinfo = windowinfo
            self.name = windowinfo.name
            self.view = windowinfo.frontend.view
            self.clearColor = windowinfo.frontend.clearColor
            self.actions = windowinfo.frontend.actions
            self.frontend = windowinfo.frontend
            self.inDialog = False
            self.hidden = False
        def initialize(self):
            self.frontend.window = self.window
            if not self.frontend.initialize(): return False
            GLPluginProgram.initialize(self)
            return True
        def display(self):
            global _globalLock
            _globalLock.acquire()
            self.frontend.display()
            _globalLock.release()
            return True
        def display_screen(self):
            global _globalLock
            _globalLock.acquire()
            self.frontend.display_screen()
            #overlay usage instructions on top of the frontend's 2D pass
            glColor3f(1,1,1)
            glRasterPos(20,50)
            gldraw.glutBitmapString(GLUT_BITMAP_HELVETICA_18,"(Do not close this window except to quit)")
            if self.inDialog:
                glColor3f(1,1,0)
                glRasterPos(20,80)
                gldraw.glutBitmapString(GLUT_BITMAP_HELVETICA_18,"In Dialog mode. Press 'Esc' to return to normal mode")
            else:
                glColor3f(1,1,0)
                glRasterPos(20,80)
                gldraw.glutBitmapString(GLUT_BITMAP_HELVETICA_18,"In Window mode. Press 'Esc' to hide window")
            _globalLock.release()
        def keyboardfunc(self,c,x,y):
            #Esc exits dialog mode or hides the window; all else forwards
            if ord(c)==27:
                if self.inDialog:
                    print "Esc pressed, hiding dialog"
                    self.inDialog = False
                else:
                    print "Esc pressed, hiding window"
                global _globalLock
                _globalLock.acquire()
                self.windowinfo.mode = 'hidden'
                self.hidden = True
                glutHideWindow()
                _globalLock.release()
                return True
            else:
                return self.frontend.keyboardfunc(c,x,y)
        def keyboardupfunc(self,c,x,y):
            return self.frontend.keyboardupfunc(c,x,y)
        def motionfunc(self,x,y,dx,dy):
            return self.frontend.motionfunc(x,y,dx,dy)
        def mousefunc(self,button,state,x,y):
            return self.frontend.mousefunc(button,state,x,y)
        def idlefunc(self):
            global _quit,_showdialog
            global _globalLock
            _globalLock.acquire()
            if _quit:
                #freeglut provides glutLeaveMainLoop; stock GLUT does not
                if bool(glutLeaveMainLoop):
                    glutLeaveMainLoop()
                else:
                    print "Not compiled with freeglut, can't exit main loop safely. Press Ctrl+C instead"
                    raw_input()
            if self.hidden:
                print "hidden, waiting...",self.windowinfo.mode
                if self.windowinfo.mode == 'shown':
                    print "Showing window"
                    glutSetWindow(self.window.glutWindowID)
                    glutShowWindow()
                    self.hidden = False
                elif self.windowinfo.mode == 'dialog':
                    print "Showing window in dialog mode"
                    self.inDialog = True
                    glutSetWindow(self.window.glutWindowID)
                    glutShowWindow()
                    self.hidden = False
            _globalLock.release()
            return self.frontend.idlefunc()
    def _run_app_thread():
        """Main loop of the GLUT visualization thread: creates the single
        GLUT window, installs the GLUTHijacker, and runs the GLUT main loop
        until quit."""
        global _thread_running,_vis,_old_glut_window,_quit,_windows
        import weakref
        _thread_running = True
        _GLBackend.initialize("Klamp't visualization")
        w = _GLBackend.createWindow("Klamp't visualization")
        hijacker = GLUTHijacker(_windows[0])
        #weak proxy avoids a reference cycle keeping the hijacker alive
        _windows[0].guidata = weakref.proxy(hijacker)
        w.setProgram(hijacker)
        _GLBackend.run()
        print "Visualization thread closing..."
        for w in _windows:
            w.vis.clear()
        _thread_running = False
        return
def _kill():
    """Signals the visualization thread to quit and blocks until it exits."""
    global _quit
    _quit = True
    while _thread_running:
        time.sleep(0.01)
    #reset so the visualization can be restarted later
    _quit = False
if _PyQtAvailable:
    from PyQt4 import QtCore
    class MyQThread(QtCore.QThread):
        """QThread that runs an arbitrary function with fixed arguments."""
        def __init__(self,func,*args):
            self.func = func
            self.args = args
            QtCore.QThread.__init__(self)
        def run(self):
            self.func(*self.args)
def _show():
    """Shows the current window, creating the WindowInfo and starting the
    visualization thread on first use."""
    global _windows,_current_window,_thread_running
    if len(_windows)==0:
        _windows.append(WindowInfo(_window_title,_frontend,_vis))
        _current_window = 0
    _windows[_current_window].mode = 'shown'
    _windows[_current_window].worlds = _current_worlds
    _windows[_current_window].active_worlds = _current_worlds[:]
    if not _thread_running:
        #restore default Ctrl+C handling so the app can be interrupted
        signal.signal(signal.SIGINT, signal.SIG_DFL)
        if _PyQtAvailable and False:
            #for some reason, QThread doesn't allow for mouse events to be posted?
            thread = MyQThread(_run_app_thread)
            thread.start()
        else:
            thread = Thread(target=_run_app_thread)
            thread.setDaemon(True)
            thread.start()
        #give the thread time to create the window before returning
        time.sleep(0.1)
def _hide():
    """Hides the current window, if any."""
    global _windows,_current_window,_thread_running
    if _current_window == None:
        return
    _windows[_current_window].mode = 'hidden'
def _dialog():
    """Shows the current window as a blocking dialog, starting the
    visualization thread if needed, and returns when the dialog closes."""
    #bug fix: the global declaration misspelled "_windows" as "__windows";
    #every other module function declares _windows
    global _windows,_current_window,_thread_running
    if len(_windows)==0:
        _windows.append(WindowInfo(_window_title,_frontend,_vis,None))
        _current_window = 0
    if not _thread_running:
        signal.signal(signal.SIGINT, signal.SIG_DFL)
        thread = Thread(target=_run_app_thread)
        thread.setDaemon(True)
        thread.start()
        #time.sleep(0.1)
    _globalLock.acquire()
    assert _windows[_current_window].mode == 'hidden',"dialog() called inside dialog?"
    _windows[_current_window].mode = 'dialog'
    _windows[_current_window].worlds = _current_worlds
    _windows[_current_window].active_worlds = _current_worlds[:]
    _globalLock.release()
    #block until the visualization thread resets the mode on dialog close
    while _windows[_current_window].mode == 'dialog':
        time.sleep(0.1)
    return
def _set_custom_ui(func):
    """Sets a custom UI constructor (glwindow -> widget) for the current
    window, creating the WindowInfo if needed."""
    global _windows,_current_window,_thread_running
    if len(_windows)==0:
        _windows.append(WindowInfo(_window_title,_frontend,_vis,None))
        _current_window = 0
    _windows[_current_window].custom_ui = func
    return
def _onFrontendChange():
    """Propagates a change of the module-level frontend/title to the current
    window, flagging it for reload by the visualization thread."""
    global _windows,_frontend,_window_title,_current_window,_thread_running
    if _current_window == None:
        return
    w = _windows[_current_window]
    w.doReload = True
    w.name = _window_title
    w.frontend = _frontend
    if w.glwindow:
        w.glwindow.reshape(_frontend.view.w,_frontend.view.h)
    if w.guidata and not _PyQtAvailable:
        #GLUT path: the hijacker forwards directly to the frontend
        w.guidata.frontend = _frontend
        _frontend.window = w.guidata.window
def _refreshDisplayLists(item):
    """Recursively refresh OpenGL appearance display lists for item.

    WorldModel instances recurse into all contained robots, rigid objects,
    and terrains; RobotModel instances recurse into their links; anything
    with an appearance() gets its display list refreshed directly.
    """
    if isinstance(item,WorldModel):
        children = [item.robot(i) for i in xrange(item.numRobots())]
        children += [item.rigidObject(i) for i in xrange(item.numRigidObjects())]
        children += [item.terrain(i) for i in xrange(item.numTerrains())]
        for child in children:
            _refreshDisplayLists(child)
    elif isinstance(item,RobotModel):
        for link_index in xrange(item.numLinks()):
            _refreshDisplayLists(item.link(link_index))
    elif hasattr(item,'appearance'):
        item.appearance().refresh(False)
def _checkWindowCurrent(item):
    """Ensure the world owning `item` is attached to the current window.

    If `item` is a WorldModel not yet tracked by the current window, it is
    removed from any other window's active-world list (refreshing display
    lists under GLUT, which cannot share them across contexts) and appended
    to the current window's world list.  Objects that reference a world via
    a `.world` attribute are handled recursively.
    """
    global _windows,_current_window,_world_to_window,_current_worlds
    if isinstance(item,int):
        # Only a raw world index is available; without a full WorldModel we
        # cannot migrate it, so at most warn.
        # NOTE(review): this condition fires when the index IS found among
        # the current worlds, which contradicts the message; also
        # _current_worlds appears to hold weakrefs elsewhere (w() is called
        # below), so 'w.index' here looks inconsistent — confirm.
        if not all(w.index != item for w in _current_worlds):
            print "klampt.vis: item appears to be in a new world, but doesn't have a full WorldModel instance"
    if isinstance(item,WorldModel):
        #print "Worlds active in current window",_current_window,":",[w().index for w in _current_worlds]
        # Only migrate when the world is not already in the current window.
        if all(item != w() for w in _current_worlds):
            #PyQt interface allows sharing display lists but GLUT does not.
            #refresh all worlds' display lists that will be shifted to the current window.
            for i,win in enumerate(_windows):
                #print "Window",i,"active worlds",[w().index for w in win.active_worlds]
                if any(item == w() for w in win.active_worlds):
                    if not _PyQtAvailable:
                        print "klampt.vis: world",item.index,"was shown in a different window, now refreshing display lists"
                        _refreshDisplayLists(item)
                    # weakref.ref to the same object compares equal, so
                    # remove() locates the existing reference.
                    win.active_worlds.remove(weakref.ref(item))
            _current_worlds.append(weakref.ref(item))
            #print "klampt.vis: world added to the visualization's world (items:",[w().index for w in _current_worlds],")"
        #else:
        #    print "klampt.vis: world",item,"is already in the current window's world"
    elif hasattr(item,'world'):
        _checkWindowCurrent(item.world)
|
'''visualize morphologies'''
from matplotlib.collections import LineCollection, PolyCollection
from matplotlib.patches import Circle
from mpl_toolkits.mplot3d.art3d import \
Line3DCollection # pylint: disable=relative-import
import numpy as np
from neurom import NeuriteType, geom
from neurom._compat import zip
from neurom.core import iter_neurites, iter_segments
from neurom.core._soma import SomaCylinders
from neurom.core.dataformat import COLS
from neurom.core.types import tree_type_checker
from neurom.morphmath import segment_radius
from neurom.view._dendrogram import Dendrogram
from . import common
# Default rendering parameters shared by the plotting helpers below.
_LINEWIDTH = 1.2
_ALPHA = 0.8
_DIAMETER_SCALE = 1.0
# Default color per neurite type; unknown types fall back to 'green'.
TREE_COLOR = {NeuriteType.basal_dendrite: 'red',
              NeuriteType.apical_dendrite: 'purple',
              NeuriteType.axon: 'blue',
              NeuriteType.soma: 'black',
              NeuriteType.undefined: 'green'}
def _plane2col(plane):
    '''Map a two-letter plane name such as 'xy' to a pair of COLS indices.'''
    planes = ('xy', 'yx', 'xz', 'zx', 'yz', 'zy')
    assert plane in planes, 'No such plane found! Please select one of: ' + str(planes)
    first, second = plane[0], plane[1]
    return (getattr(COLS, first.capitalize()),
            getattr(COLS, second.capitalize()), )
def _get_linewidth(tree, linewidth, diameter_scale):
'''calculate the desired linewidth based on tree contents
If diameter_scale exists, it is used to scale the diameter of each of the segments
in the tree
If diameter_scale is None, the linewidth is used.
'''
if diameter_scale is not None and tree:
linewidth = [2 * segment_radius(s) * diameter_scale
for s in iter_segments(tree)]
return linewidth
def _get_color(treecolor, tree_type):
"""if treecolor set, it's returned, otherwise tree_type is used to return set colors"""
if treecolor is not None:
return treecolor
return TREE_COLOR.get(tree_type, 'green')
def plot_tree(ax, tree, plane='xy',
              diameter_scale=_DIAMETER_SCALE, linewidth=_LINEWIDTH,
              color=None, alpha=_ALPHA):
    '''Draw the tree's segments as a 2d LineCollection on ax.

    Args:
        ax(matplotlib axes): on what to plot
        tree(neurom.core.Tree or neurom.core.Neurite): plotted tree
        plane(str): Any pair of 'xyz'
        diameter_scale(float): Scale factor multiplied with segment diameters before plotting
        linewidth(float): all segments are plotted with this width, but only if diameter_scale=None
        color(str or None): Color of plotted values, None corresponds to default choice
        alpha(float): Transparency of plotted values

    Note:
        A tree with a single point yields no segments, hence an empty plot.
    '''
    col0, col1 = _plane2col(plane)
    segments = []
    for seg in iter_segments(tree):
        start, end = seg[0], seg[1]
        segments.append(((start[col0], start[col1]),
                         (end[col0], end[col1])))
    widths = _get_linewidth(tree, diameter_scale=diameter_scale, linewidth=linewidth)
    draw_color = _get_color(color, tree.type)
    ax.add_collection(LineCollection(segments, color=draw_color,
                                     linewidth=widths, alpha=alpha))
def plot_soma(ax, soma, plane='xy',
              soma_outline=True,
              linewidth=_LINEWIDTH,
              color=None, alpha=_ALPHA):
    '''Generates a 2d figure of the soma.
    Args:
        ax(matplotlib axes): on what to plot
        soma(neurom.core.Soma): plotted soma
        plane(str): Any pair of 'xyz'
        soma_outline(bool): draw the soma as a filled circle outline;
            otherwise plot its boundary points as a closed polyline
        linewidth(float): width of the polyline when soma_outline=False
        color(str or None): Color of plotted values, None corresponds to default choice
        alpha(float): Transparency of plotted values
    '''
    plane0, plane1 = _plane2col(plane)
    color = _get_color(color, tree_type=NeuriteType.soma)
    if isinstance(soma, SomaCylinders):
        for start, end in zip(soma.points, soma.points[1:]):
            common.project_cylinder_onto_2d(ax, (plane0, plane1),
                                            start=start[COLS.XYZ], end=end[COLS.XYZ],
                                            start_radius=start[COLS.R], end_radius=end[COLS.R],
                                            color=color, alpha=alpha)
    else:
        if soma_outline:
            ax.add_artist(Circle(soma.center, soma.radius, color=color, alpha=alpha))
        else:
            points = [(p[plane0], p[plane1]) for p in soma.iter()]
            if points:
                points.append(points[0])  # close the loop
                # BUG FIX: matplotlib treats a single sequence of (x, y)
                # pairs as column data and would plot two lines against the
                # index; unzip into separate x and y sequences instead.
                xs, ys = zip(*points)
                ax.plot(xs, ys, color=color, alpha=alpha, linewidth=linewidth)
    ax.set_xlabel(plane[0])
    ax.set_ylabel(plane[1])
    bounding_box = geom.bounding_box(soma)
    ax.dataLim.update_from_data_xy(np.vstack(([bounding_box[0][plane0], bounding_box[0][plane1]],
                                              [bounding_box[1][plane0], bounding_box[1][plane1]])),
                                   ignore=False)
def plot_neuron(ax, nrn,
                neurite_type=NeuriteType.all,
                plane='xy',
                soma_outline=True,
                diameter_scale=_DIAMETER_SCALE, linewidth=_LINEWIDTH,
                color=None, alpha=_ALPHA):
    '''Draw a 2D projection of a neuron: its soma plus all matching neurites.

    Args:
        ax(matplotlib axes): on what to plot
        nrn(neuron): neuron to be plotted
        neurite_type(NeuriteType): an optional filter on the neurite type
        plane(str): Any pair of 'xyz'
        soma_outline(bool): should the soma be drawn as an outline
        diameter_scale(float): Scale factor multiplied with segment diameters before plotting
        linewidth(float): all segments are plotted with this width, but only if diameter_scale=None
        color(str or None): Color of plotted values, None corresponds to default choice
        alpha(float): Transparency of plotted values
    '''
    plot_soma(ax, nrn.soma, plane=plane, soma_outline=soma_outline,
              linewidth=linewidth, color=color, alpha=alpha)
    selected = iter_neurites(nrn, filt=tree_type_checker(neurite_type))
    for neurite in selected:
        plot_tree(ax, neurite, plane=plane, diameter_scale=diameter_scale,
                  linewidth=linewidth, color=color, alpha=alpha)
    ax.set_title(nrn.name)
    ax.set_xlabel(plane[0])
    ax.set_ylabel(plane[1])
def _update_3d_datalim(ax, obj):
    '''Grow the 3d axes data limits to include obj's bounding box.

    Unlike 2d Axes, 3d Axes do not update dataLim from added collections,
    so this must be done by hand.
    '''
    lo, hi = geom.bounding_box(obj)
    xy_bounds = np.vstack((lo[:COLS.Z], hi[:COLS.Z]))
    ax.xy_dataLim.update_from_data_xy(xy_bounds, ignore=False)
    z_bounds = np.vstack(((lo[COLS.Z], lo[COLS.Z]),
                          (hi[COLS.Z], hi[COLS.Z])))
    ax.zz_dataLim.update_from_data_xy(z_bounds, ignore=False)
def plot_tree3d(ax, tree,
                diameter_scale=_DIAMETER_SCALE, linewidth=_LINEWIDTH,
                color=None, alpha=_ALPHA):
    '''Draw the tree's segments as a 3d Line3DCollection on ax.

    A tree with a single point yields no segments, hence an empty plot.

    Args:
        ax(matplotlib axes): on what to plot
        tree(neurom.core.Tree or neurom.core.Neurite): plotted tree
        diameter_scale(float): Scale factor multiplied with segment diameters before plotting
        linewidth(float): all segments are plotted with this width, but only if diameter_scale=None
        color(str or None): Color of plotted values, None corresponds to default choice
        alpha(float): Transparency of plotted values
    '''
    segments = [(seg[0][COLS.XYZ], seg[1][COLS.XYZ])
                for seg in iter_segments(tree)]
    widths = _get_linewidth(tree, diameter_scale=diameter_scale, linewidth=linewidth)
    draw_color = _get_color(color, tree.type)
    ax.add_collection3d(Line3DCollection(segments, color=draw_color,
                                         linewidth=widths, alpha=alpha))
    _update_3d_datalim(ax, tree)
def plot_soma3d(ax, soma, color=None, alpha=_ALPHA):
    '''Draw the soma in 3d: stacked cylinders for SomaCylinders, a sphere
    otherwise.

    Args:
        ax(matplotlib axes): on what to plot
        soma(neurom.core.Soma): plotted soma
        color(str or None): Color of plotted values, None corresponds to default choice
        alpha(float): Transparency of plotted values
    '''
    soma_color = _get_color(color, tree_type=NeuriteType.soma)
    if isinstance(soma, SomaCylinders):
        for begin, finish in zip(soma.points, soma.points[1:]):
            common.plot_cylinder(ax,
                                 start=begin[COLS.XYZ], end=finish[COLS.XYZ],
                                 start_radius=begin[COLS.R], end_radius=finish[COLS.R],
                                 color=soma_color, alpha=alpha)
    else:
        common.plot_sphere(ax, center=soma.center[COLS.XYZ], radius=soma.radius,
                           color=soma_color, alpha=alpha)
    # 3d Axes ignore collections when computing dataLim; update explicitly.
    _update_3d_datalim(ax, soma)
def plot_neuron3d(ax, nrn, neurite_type=NeuriteType.all,
                  diameter_scale=_DIAMETER_SCALE, linewidth=_LINEWIDTH,
                  color=None, alpha=_ALPHA):
    '''Draw a 3d figure of a neuron: its soma plus all matching neurites.

    Args:
        ax(matplotlib axes): on what to plot
        nrn(neuron): neuron to be plotted
        neurite_type(NeuriteType): an optional filter on the neurite type
        diameter_scale(float): Scale factor multiplied with segment diameters before plotting
        linewidth(float): all segments are plotted with this width, but only if diameter_scale=None
        color(str or None): Color of plotted values, None corresponds to default choice
        alpha(float): Transparency of plotted values
    '''
    plot_soma3d(ax, nrn.soma, color=color, alpha=alpha)
    selected = iter_neurites(nrn, filt=tree_type_checker(neurite_type))
    for neurite in selected:
        plot_tree3d(ax, neurite, diameter_scale=diameter_scale,
                    linewidth=linewidth, color=color, alpha=alpha)
    ax.set_title(nrn.name)
def _generate_collection(group, ax, ctype, colors):
    '''Add a polygon collection for one neurite group and register its
    legend entry the first time its color appears.'''
    color = TREE_COLOR[ctype]
    polys = PolyCollection(group, closed=False, antialiaseds=True,
                           edgecolors='face', facecolors=color)
    ax.add_collection(polys)
    if color not in colors:
        # Zero-length dummy line so the legend picks up this neurite type.
        label = str(ctype).replace('NeuriteType.', '').replace('_', ' ').capitalize()
        ax.plot((0., 0.), (0., 0.), c=color, label=label)
        colors.add(color)
def _render_dendrogram(dnd, ax, displacement):
    '''Render the dendrogram's neurite groups and soma onto ax.

    Each neurite group is shifted horizontally so neighbors do not overlap;
    the accumulated displacement is returned so callers can size the figure.
    '''
    # set of unique colors that reflect the set of types of the neurites
    colors = set()
    for n, (indices, ctype) in enumerate(zip(dnd.groups, dnd.types)):
        # slice rectangles array for the current neurite
        group = dnd.data[indices[0]:indices[1]]
        if n > 0:
            # displace the neurites by half of their maximum x dimension
            # plus half of the previous neurite's maximum x dimension
            displacement += 0.5 * (dnd.dims[n - 1][0] + dnd.dims[n][0])
        # arrange the trees without overlapping with each other
        group += (displacement, 0.)
        # create the polygonal collection of the dendrogram
        # segments
        _generate_collection(group, ax, ctype, colors)
    soma_square = dnd.soma
    if soma_square is not None:
        # Center the soma square under the rendered neurites and draw a
        # horizontal baseline in two pieces around it.
        _generate_collection((soma_square + (displacement / 2., 0.),), ax, NeuriteType.soma, colors)
        ax.plot((displacement / 2., displacement), (0., 0.), color='k')
        ax.plot((0., displacement / 2.), (0., 0.), color='k')
    return displacement
def plot_dendrogram(ax, obj, show_diameters=True):
    '''Dendrogram of `obj`
    Args:
        ax(matplotlib axes): on what to plot
        obj: Neuron or tree \
            neurom.Neuron, neurom.Tree
        show_diameters : boolean \
            Determines if node diameters will \
            be show or not.
    '''
    # create dendrogram and generate rectangle collection
    dnd = Dendrogram(obj, show_diameters=show_diameters)
    dnd.generate()
    # render dendrogram and take into account neurite displacement which
    # starts as zero. It is important to avoid overlapping of neurites
    # and to determine tha limits of the figure.
    _render_dendrogram(dnd, ax, 0.)
    ax.set_title('Morphology Dendrogram')
    ax.set_xlabel('micrometers (um)')
    ax.set_ylabel('micrometers (um)')
    ax.set_aspect('auto')
    ax.legend()
|
def CheckChangeOnCommit(input_api, output_api):
    """Presubmit hook: run the script unit tests in this directory on commit."""
    unit_tests = input_api.canned_checks.GetUnitTestsInDirectory(
        input_api, output_api, '.', files_to_check=['test_scripts.py$'])
    return input_api.RunTests(unit_tests)
|
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Usage(models.Model):
    """Access-log entry recording one API request."""
    ip = models.CharField(max_length=50)
    # NOTE(review): max_length=3 cannot hold verbs like 'DELETE' or
    # 'PATCH' — confirm only GET/PUT/etc. are expected.
    method = models.CharField(max_length=3)
    path = models.CharField(max_length=100)
    params = models.CharField(max_length=255)
    def __str__(self):
        return self.ip
@python_2_unicode_compatible
class Element(models.Model):
    """Reference entry for a traditional element (Thai: 'that')."""
    name = models.CharField(max_length=10)
    code = models.CharField(max_length=10)
    def __str__(self):
        return self.name
    class Meta:
        verbose_name = "ธาตุ"
        verbose_name_plural = "ธาตุต่างๆ"
        db_table = 'element'
@python_2_unicode_compatible
class Disease(models.Model):
    """Disease reference with audit fields (creator / modification stamps)."""
    name = models.CharField(max_length=100, unique=True)
    description = models.CharField(max_length=255, null=True)
    is_congenital = models.BooleanField(default=False)
    # Audit trail: who created/last touched the row and when.
    created_by = models.CharField(max_length=50, null=True)
    created_date = models.DateTimeField(auto_now_add=True)
    last_modified = models.DateTimeField(auto_now=True, null=True)
    last_modified_by = models.CharField(max_length=30, null=True, blank=True)
    def __str__(self):
        return self.name
    class Meta:
        verbose_name = "เชื้อโรค"
        verbose_name_plural = "กลุ่มเชื้อโรค"
        db_table = 'disease'
@python_2_unicode_compatible
class Nutrient(models.Model):
    """Nutritional composition record referenced by Ingredient and Food.

    All values are stored as fixed-point decimals with 4 decimal places.
    """
    water = models.DecimalField(max_digits=14, decimal_places=4)
    protein = models.DecimalField(max_digits=14, decimal_places=4)
    fat = models.DecimalField(max_digits=14, decimal_places=4)
    carbohydrate = models.DecimalField(max_digits=14, decimal_places=4)
    dietary_fiber = models.DecimalField(max_digits=14, decimal_places=4)
    ash = models.DecimalField(max_digits=14, decimal_places=4)
    calcium = models.DecimalField(max_digits=14, decimal_places=4)
    phosphorus = models.DecimalField(max_digits=14, decimal_places=4)
    iron = models.DecimalField(max_digits=14, decimal_places=4)
    retinol = models.DecimalField(max_digits=14, decimal_places=4)
    beta_carotene = models.DecimalField(max_digits=14, decimal_places=4)
    vitamin_a = models.DecimalField(max_digits=14, decimal_places=4)
    vitamin_e = models.DecimalField(max_digits=14, decimal_places=4)
    thiamin = models.DecimalField(max_digits=14, decimal_places=4)
    riboflavin = models.DecimalField(max_digits=14, decimal_places=4)
    niacin = models.DecimalField(max_digits=14, decimal_places=4)
    vitamin_c = models.DecimalField(max_digits=14, decimal_places=4)
    def __str__(self):
        # Use the public ``pk`` property instead of the private
        # ``_get_pk_val()`` API.
        return 'id: ' + str(self.pk)
    class Meta:
        verbose_name = "สารอาหาร"
        verbose_name_plural = "กลุ่มสารอาหาร"
        db_table = 'nutrient'
@python_2_unicode_compatible
class IngredientCategory(models.Model):
    """Category label for ingredients (stored in table 'ingredient_type')."""
    name = models.CharField(max_length=50, unique=True)
    # Audit trail: who created/last touched the row and when.
    created_by = models.CharField(max_length=50)
    created_date = models.DateTimeField(auto_now_add=True)
    last_modified = models.DateTimeField(auto_now=True, null=True)
    last_modified_by = models.CharField(max_length=30, null=True, blank=True)
    def __str__(self):
        return self.name
    class Meta:
        verbose_name = "หมวดหมู่วัตถุดิบ"
        verbose_name_plural = "กลุ่มหมวดหมู่วัตถุดิบ"
        db_table = 'ingredient_type'
@python_2_unicode_compatible
class FoodCategory(models.Model):
    """Category label for foods (stored in table 'food_type')."""
    name = models.CharField(max_length=50, unique=True)
    # Audit trail: who created/last touched the row and when.
    created_by = models.CharField(max_length=50)
    created_date = models.DateTimeField(auto_now_add=True)
    last_modified = models.DateTimeField(auto_now=True, null=True)
    last_modified_by = models.CharField(max_length=30, null=True, blank=True)
    def __str__(self):
        return self.name
    class Meta:
        verbose_name = "หมวดหมู่อาหาร"
        verbose_name_plural = "กลุ่มหมวดหมู่อาหาร"
        db_table = 'food_type'
@python_2_unicode_compatible
class Ingredient(models.Model):
    """A raw ingredient with its nutrition, element, and disease relations."""
    name = models.CharField(max_length=100, unique=True)
    description = models.CharField(max_length=255, blank=True, null=True)
    calories = models.IntegerField(default=0)
    # SET_NULL keeps the ingredient when its nutrient/element row is deleted.
    nutrient = models.ForeignKey(Nutrient,
                                 on_delete=models.SET_NULL,
                                 blank=True,
                                 null=True)
    element = models.ForeignKey(Element,
                                on_delete=models.SET_NULL,
                                blank=True,
                                null=True)
    category = models.ManyToManyField(IngredientCategory, blank=True)
    # Diseases this ingredient helps with vs. aggravates.
    healing = models.ManyToManyField(Disease, related_name="healing", blank=True)
    affect = models.ManyToManyField(Disease, related_name="affect", blank=True)
    code = models.IntegerField(default=0)
    def __str__(self):
        return self.name
    class Meta:
        verbose_name = "วัตถุดิบ"
        verbose_name_plural = "กลุ่มวัตถุดิบ"
        db_table = 'ingredient'
@python_2_unicode_compatible
class Food(models.Model):
    """A dish composed of Ingredients via the Menu through-model."""
    name = models.CharField(max_length=100, unique=True)
    description = models.CharField(max_length=255, blank=True, null=True, default="")
    calories = models.IntegerField(default=0)
    # SET_NULL keeps the food when its nutrient row is deleted.
    nutrient = models.ForeignKey(Nutrient,
                                 on_delete=models.SET_NULL,
                                 blank=True,
                                 null=True)
    # Per-ingredient weights live on the Menu through-model.
    ingredients = models.ManyToManyField(Ingredient, through='Menu')
    category = models.ManyToManyField(FoodCategory)
    # Audit trail: who created/last touched the row and when.
    created_by = models.CharField(max_length=50, default="")
    created_date = models.DateTimeField(auto_now_add=True)
    last_modified = models.DateTimeField(auto_now=True, null=True)
    last_modified_by = models.CharField(max_length=30, null=True, blank=True)
    code = models.IntegerField(default=0)
    def __str__(self):
        return self.name
    class Meta:
        verbose_name = "อาหาร"
        verbose_name_plural = "กลุ่มอาหาร"
        db_table = 'food'
class Menu(models.Model):
    """Through-model linking Food to Ingredient with a per-dish weight."""
    food = models.ForeignKey(Food, on_delete=models.CASCADE)
    ingredient = models.ForeignKey(Ingredient, on_delete=models.CASCADE)
    # Amount of the ingredient used in this dish.
    weight = models.DecimalField(max_digits=14, decimal_places=4)
    name = models.CharField(max_length=100, blank=True, default="")
    class Meta:
        db_table = 'menu'
|
import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
# Generate and process a 128-sample daily artificial series: linear trend,
# 12-step cycle, no noise (sigma=0), 20 exogenous variables, no AR terms.
art.process_dataset(N = 128 , FREQ = 'D', seed = 0, trendtype = "LinearTrend", cycle_length = 12, transform = "None", sigma = 0.0, exog_count = 20, ar_order = 0);
|
"""A generic class to build line-oriented command interpreters.
Interpreters constructed with this class obey the following conventions:
1. End of file on input is processed as the command 'EOF'.
2. A command is parsed out of each line by collecting the prefix composed
of characters in the identchars member.
3. A command `foo' is dispatched to a method 'do_foo()'; the do_ method
is passed a single argument consisting of the remainder of the line.
4. Typing an empty line repeats the last command. (Actually, it calls the
method `emptyline', which may be overridden in a subclass.)
5. There is a predefined `help' method. Given an argument `topic', it
calls the command `help_topic'. With no arguments, it lists all topics
with defined help_ functions, broken into up to three topics; documented
commands, miscellaneous help topics, and undocumented commands.
6. The command '?' is a synonym for `help'. The command '!' is a synonym
for `shell', if a do_shell method exists.
7. If completion is enabled, completing commands will be done automatically,
and completing of commands args is done by calling complete_foo() with
arguments text, line, begidx, endidx. text is string we are matching
against, all returned matches must begin with it. line is the current
input line (lstripped), begidx and endidx are the beginning and end
indexes of the text being matched, which could be used to provide
different completion depending upon which position the argument is in.
The `default' method may be overridden to intercept commands for which there
is no do_ method.
The `completedefault' method may be overridden to intercept completions for
commands that have no complete_ method.
The data member `self.ruler' sets the character used to draw separator lines
in the help messages. If empty, no ruler line is drawn. It defaults to "=".
If the value of `self.intro' is nonempty when the cmdloop method is called,
it is printed out on interpreter startup. This value may be overridden
via an optional argument to the cmdloop() method.
The data members `self.doc_header', `self.misc_header', and
`self.undoc_header' set the headers used for the help function's
listings of documented functions, miscellaneous topics, and undocumented
functions respectively.
"""
import string, sys
# BUG FIX: this module defines ElCmd (not Cmd); exporting a nonexistent
# name makes ``from <module> import *`` raise AttributeError.
__all__ = ["ElCmd"]
PROMPT = '(Cmd) '
IDENTCHARS = string.ascii_letters + string.digits + '_'
class ElCmd:
    """A simple framework for writing line-oriented command interpreters.
    These are often useful for test harnesses, administrative tools, and
    prototypes that will later be wrapped in a more sophisticated interface.
    A Cmd instance or subclass instance is a line-oriented interpreter
    framework. There is no good reason to instantiate Cmd itself; rather,
    it's useful as a superclass of an interpreter class you define yourself
    in order to inherit Cmd's methods and encapsulate action methods.
    """
    # Class-level defaults; subclasses typically override prompt/intro.
    prompt = PROMPT
    identchars = IDENTCHARS
    ruler = '='
    lastcmd = ''
    intro = None
    doc_leader = ""
    doc_header = "Documented commands (type help <topic>):"
    misc_header = "Miscellaneous help topics:"
    undoc_header = "Undocumented commands:"
    nohelp = "*** No help on %s"
    # Unlike stdlib cmd.Cmd, raw input() is disabled by default so that the
    # editline backend set up in __init__ handles input and completion.
    use_rawinput = False
    def __init__(self, completekey='tab', stdin=None, stdout=None):
        """Instantiate a line-oriented interpreter framework.
        The optional argument 'completekey' is the readline name of a
        completion key; it defaults to the Tab key. If completekey is
        not None and the readline module is available, command completion
        is done automatically. The optional arguments stdin and stdout
        specify alternate input and output file objects; if not specified,
        sys.stdin and sys.stdout are used.
        """
        if stdin is not None:
            self.stdin = stdin
        else:
            self.stdin = sys.stdin
        if stdout is not None:
            self.stdout = stdout
        else:
            self.stdout = sys.stdout
        self.cmdqueue = []
        self.completekey = completekey
        if not self.use_rawinput and self.completekey:
            # Wire completion through the third-party 'editline' module
            # (libedit bindings) rather than readline.
            try:
                import editline
                self.editline = editline.editline("CMD",
                                    self.stdin, self.stdout, sys.stderr)
                self.editline.rl_completer = self.complete
            except ImportError:
                print("Failed to import editline")
                pass
    def cmdloop(self, intro=None):
        """Repeatedly issue a prompt, accept input, parse an initial prefix
        off the received input, and dispatch to action methods, passing them
        the remainder of the line as argument.
        """
        self.preloop()
        try:
            if intro is not None:
                self.intro = intro
            if self.intro:
                self.stdout.write(str(self.intro)+"\n")
            stop = None
            while not stop:
                if self.cmdqueue:
                    line = self.cmdqueue.pop(0)
                else:
                    if self.use_rawinput:
                        try:
                            line = input(self.prompt)
                        except EOFError:
                            line = 'EOF'
                    else:
                        self.editline.prompt = self.prompt
                        line = self.editline.readline()
                        # An empty read from editline signals end-of-file.
                        if not len(line):
                            line = 'EOF'
                        else:
                            line = line.rstrip('\r\n')
                line = self.precmd(line)
                stop = self.onecmd(line)
                stop = self.postcmd(stop, line)
            self.postloop()
        finally:
            pass
    def precmd(self, line):
        """Hook method executed just before the command line is
        interpreted, but after the input prompt is generated and issued.
        """
        return line
    def postcmd(self, stop, line):
        """Hook method executed just after a command dispatch is finished."""
        return stop
    def preloop(self):
        """Hook method executed once when the cmdloop() method is called."""
        pass
    def postloop(self):
        """Hook method executed once when the cmdloop() method is about to
        return.
        """
        pass
    def parseline(self, line):
        """Parse the line into a command name and a string containing
        the arguments. Returns a tuple containing (command, args, line).
        'command' and 'args' may be None if the line couldn't be parsed.
        """
        line = line.strip()
        if not line:
            return None, None, line
        elif line[0] == '?':
            # '?' is a synonym for 'help'.
            line = 'help ' + line[1:]
        elif line[0] == '!':
            # '!' is a synonym for 'shell', if a do_shell method exists.
            if hasattr(self, 'do_shell'):
                line = 'shell ' + line[1:]
            else:
                return None, None, line
        i, n = 0, len(line)
        while i < n and line[i] in self.identchars: i = i+1
        cmd, arg = line[:i], line[i:].strip()
        return cmd, arg, line
    def onecmd(self, line):
        """Interpret the argument as though it had been typed in response
        to the prompt.
        This may be overridden, but should not normally need to be;
        see the precmd() and postcmd() methods for useful execution hooks.
        The return value is a flag indicating whether interpretation of
        commands by the interpreter should stop.
        """
        cmd, arg, line = self.parseline(line)
        if not line:
            return self.emptyline()
        if cmd is None:
            return self.default(line)
        self.lastcmd = line
        # NOTE: unlike stdlib cmd.Cmd, 'EOF' exits the whole process here
        # instead of returning a stop flag.
        if line == 'EOF' :
            print("")
            print("Bye")
            sys.exit(0)
        if cmd == '':
            return self.default(line)
        else:
            try:
                func = getattr(self, 'do_' + cmd)
            except AttributeError:
                return self.default(line)
            return func(arg)
    def emptyline(self):
        """Called when an empty line is entered in response to the prompt.
        If this method is not overridden, it repeats the last nonempty
        command entered.
        """
        if self.lastcmd:
            return self.onecmd(self.lastcmd)
    def default(self, line):
        """Called on an input line when the command prefix is not recognized.
        If this method is not overridden, it prints an error message and
        returns.
        """
        self.stdout.write('*** Unknown syntax: %s (%d)\n' % (line,len(line)))
    def completedefault(self, *ignored):
        """Method called to complete an input line when no command-specific
        complete_*() method is available.
        By default, it returns an empty list.
        """
        return []
    def completenames(self, text, *ignored):
        """Return the command names (without the 'do_' prefix) starting
        with 'text'."""
        dotext = 'do_'+text
        return [a[3:] for a in self.get_names() if a.startswith(dotext)]
    def complete(self, text, state):
        """Return the next possible completion for 'text'.
        If a command has not been entered, then complete against command list.
        Otherwise try to call complete_<command> to get list of completions.
        """
        if state == 0:
            # First call for this completion: (re)build the match list.
            origline = self.editline.get_line_buffer()
            line = origline.lstrip()
            stripped = len(origline) - len(line)
            begidx = self.editline.get_begidx() - stripped
            endidx = self.editline.get_endidx() - stripped
            if begidx>0:
                cmd, args, foo = self.parseline(line)
                if cmd == '':
                    compfunc = self.completedefault
                else:
                    try:
                        compfunc = getattr(self, 'complete_' + cmd)
                    except AttributeError:
                        compfunc = self.completedefault
            else:
                compfunc = self.completenames
            self.completion_matches = compfunc(text, line, begidx, endidx)
        try:
            return self.completion_matches[state]
        except IndexError:
            return None
    def get_names(self):
        # This method used to pull in base class attributes
        # at a time dir() didn't do it yet.
        return dir(self.__class__)
    def complete_help(self, *args):
        """Complete 'help' arguments from both command names and help_*
        topic names."""
        commands = set(self.completenames(*args))
        topics = set(a[5:] for a in self.get_names()
                     if a.startswith('help_' + args[0]))
        return list(commands | topics)
    def do_help(self, arg):
        'List available commands with "help" or detailed help with "help cmd".'
        if arg:
            # XXX check arg syntax
            try:
                func = getattr(self, 'help_' + arg)
            except AttributeError:
                # No help_<arg> method; fall back to the do_<arg> docstring.
                try:
                    doc=getattr(self, 'do_' + arg).__doc__
                    if doc:
                        self.stdout.write("%s\n"%str(doc))
                        return
                except AttributeError:
                    pass
                self.stdout.write("%s\n"%str(self.nohelp % (arg,)))
                return
            func()
        else:
            names = self.get_names()
            cmds_doc = []
            cmds_undoc = []
            help = {}
            for name in names:
                if name[:5] == 'help_':
                    help[name[5:]]=1
            names.sort()
            # There can be duplicates if routines overridden
            prevname = ''
            for name in names:
                if name[:3] == 'do_':
                    if name == prevname:
                        continue
                    prevname = name
                    cmd=name[3:]
                    if cmd in help:
                        cmds_doc.append(cmd)
                        del help[cmd]
                    elif getattr(self, name).__doc__:
                        cmds_doc.append(cmd)
                    else:
                        cmds_undoc.append(cmd)
            self.stdout.write("%s\n"%str(self.doc_leader))
            self.print_topics(self.doc_header,   cmds_doc,   15,80)
            self.print_topics(self.misc_header,  list(help.keys()),15,80)
            self.print_topics(self.undoc_header, cmds_undoc, 15,80)
    def print_topics(self, header, cmds, cmdlen, maxcol):
        """Write one help section: a header, optional ruler line, and the
        command names columnized to maxcol-1 characters."""
        if cmds:
            self.stdout.write("%s\n"%str(header))
            if self.ruler:
                self.stdout.write("%s\n"%str(self.ruler * len(header)))
            self.columnize(cmds, maxcol-1)
            self.stdout.write("\n")
    def columnize(self, list, displaywidth=80):
        """Display a list of strings as a compact set of columns.
        Each column is only as wide as necessary.
        Columns are separated by two spaces (one was not legible enough).
        """
        if not list:
            self.stdout.write("<empty>\n")
            return
        nonstrings = [i for i in range(len(list))
                        if not isinstance(list[i], str)]
        if nonstrings:
            raise TypeError("list[i] not a string for i in %s"
                            % ", ".join(map(str, nonstrings)))
        size = len(list)
        if size == 1:
            self.stdout.write('%s\n'%str(list[0]))
            return
        # Try every row count from 1 upwards
        for nrows in range(1, len(list)):
            ncols = (size+nrows-1) // nrows
            colwidths = []
            totwidth = -2
            for col in range(ncols):
                colwidth = 0
                for row in range(nrows):
                    i = row + nrows*col
                    if i >= size:
                        break
                    x = list[i]
                    colwidth = max(colwidth, len(x))
                colwidths.append(colwidth)
                totwidth += colwidth + 2
                if totwidth > displaywidth:
                    break
            if totwidth <= displaywidth:
                break
        else:
            nrows = len(list)
            ncols = 1
            colwidths = [0]
        for row in range(nrows):
            texts = []
            for col in range(ncols):
                i = row + nrows*col
                if i >= size:
                    x = ""
                else:
                    x = list[i]
                texts.append(x)
            while texts and not texts[-1]:
                del texts[-1]
            for col in range(len(texts)):
                texts[col] = texts[col].ljust(colwidths[col])
            # NOTE(review): the docstring promises two-space separation and
            # column widths are budgeted with +2, but this joins with a
            # single space (stdlib cmd uses '"  ".join') — confirm intent.
            self.stdout.write("%s\n"%str("  ".join(texts)))
class MyCmd(ElCmd,object):
    # Small demo interpreter: four trivial commands, no custom help.
    # (Deliberately no docstrings on the do_* methods so they stay in the
    # "undocumented commands" section of help output.)
    def do_mods(self, s):
        print(sys.modules.keys())
    def do_bob(self, s):
        print("bob!")
    def do_blob(self, s):
        print("blob!")
    def do_bleep(self, s):
        print("bleep!")
if __name__ == '__main__':
    # Demo entry point: run an interactive loop on the sample interpreter.
    mc = MyCmd()
    mc.cmdloop()
|
from pymacy.db import get_db
from bson.json_util import dumps

# Export the first 100 benchmark documents for element Ni to Ni.json.
db = get_db()
results = []
for doc in db.benchmark.find({"element": "Ni"}):
    results.append(doc)
    # Stop once 100 documents have been collected (the original counter
    # logic also yielded exactly 100, but fetched one extra document).
    if len(results) >= 100:
        break
print(results[0])
with open("Ni.json", 'w') as f:
    # Renamed from 'file', which shadowed the builtin name.
    payload = dumps(results)
    f.write(payload)
|
"""
Generic, configurable scatterplot
"""
import collections
import warnings
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
import pandas as pd
import seaborn as sns
class PlottingAttribute(object):

    __slots__ = 'groupby', 'title', 'palette', 'group_to_attribute'

    def __init__(self, groupby, title, palette, order):
        """An attribute that you want to visualize with a specific visual cue

        Parameters
        ----------
        groupby : mappable
            A series or dict or list to groupby on the rows of the data
        title : str
            Title of this part of the legend
        palette : list-like
            What to plot for each group
        order : list-like or None
            Group order matching `palette` positionally; None means a
            single group mapped to the first palette entry
        """
        self.groupby = groupby
        self.title = title
        self.palette = palette
        if order is None:
            # Single attribute: everything maps to the first palette entry
            self.group_to_attribute = {None: palette[0]}
        else:
            # One palette entry per group, matched by position
            self.group_to_attribute = dict(zip(order, palette))

    def __getitem__(self, item):
        return self.group_to_attribute[item]
class PlotterMixin(object):
    """Shared machinery for mapping data groups to visual attributes.

    Must be mixed into a class that creates the ``self.data``,
    ``self.plot_data`` and ``self.samples`` attributes (e.g. via an
    ``establish_data`` method).
    """
    # Markers that can be filled, in a reasonable order so things that can be
    # confused with each other (e.g. triangles pointing to the left or right)
    # are not next to each other
    filled_markers = (u'o', u'v', u's', u'*', u'h', u'<', u'H', u'x', u'8',
                      u'>', u'D', u'd', u'^')

    # Ranges used when linearly spacing attribute palettes
    linewidth_min, linewidth_max = 0.1, 5
    alpha_min, alpha_max = 0.1, 1
    size_min, size_max = 3, 30

    # Column order used when grouping samples for drawing and the legend
    legend_order = 'color', 'symbol', 'linewidth', 'edgecolor', 'alpha', 'size'

    def establish_colors(self, color, hue, hue_order, palette):
        """Get a list of colors for the main component of the plots."""
        n_colors = None
        current_palette = sns.utils.get_color_cycle()
        color_title = None

        if color is not None and palette is not None:
            error = 'Cannot interpret colors to plot when both "color" and ' \
                    '"palette" are specified'
            raise ValueError(error)

        # Force "hue" to be a mappable
        if hue is not None:
            try:
                # Check if "hue" is a column in the data
                color_title = str(hue)
                hue = self.data[hue]
            except (ValueError, KeyError):
                # Hue is already a mappable
                if isinstance(hue, pd.Series):
                    color_title = hue.name
                else:
                    color_title = None

            # This will give the proper number of categories even if there are
            # more categories in "hue_order" than represented in "hue"
            hue_order = sns.utils.categorical_order(hue, hue_order)
            hue = pd.Categorical(hue, hue_order)
            n_colors = len(self.plot_data.groupby(hue))
        else:
            if hue_order is not None:
                # Check if "hue_order" specifies rows in the data
                samples_to_plot = self.plot_data.index.intersection(hue_order)
                n_colors = len(samples_to_plot)
                if n_colors > 0:
                    # Different color for every sample (row name)
                    hue = pd.Series(self.plot_data.index,
                                    index=self.plot_data.index)
                else:
                    error = "When 'hue=None' and 'hue_order' is specified, " \
                            "'hue_order' must overlap with the data row " \
                            "names (index)"
                    raise ValueError(error)
            else:
                # Same color for everything
                hue = pd.Series('hue', index=self.plot_data.index)
                n_colors = 1

        if palette is not None:
            colors = sns.color_palette(palette, n_colors=n_colors)
        elif color is not None:
            colors = sns.light_palette(color, n_colors=n_colors)
        else:
            colors = sns.light_palette(current_palette[0],
                                       n_colors=n_colors)

        self.color = PlottingAttribute(hue, color_title, colors, hue_order)

    def _maybe_make_grouper(self, attribute, palette_maker, order=None,
                            func=None, default=None):
        """Create a Series from a single attribute, else make categorical.

        Checks if the attribute is in the data provided, or is an external
        mapper.

        Parameters
        ----------
        attribute : object
            Either a single item to create into a series, or a series mapping
            each sample to an attribute (e.g. the plotting symbol 'o' or
            linewidth 1)
        palette_maker : function
            Function which takes an integer and creates the appropriate
            palette for the attribute, e.g. shades of grey for edgecolor or
            linearly spaced sizes
        order : list
            The order to create the attributes into
        func : function
            A function which returns true if the attribute is a single valid
            instance, e.g. "black" for color or 0.1 for linewidth. Otherwise,
            we assume that "attribute" is a mappable

        Returns
        -------
        grouper : PlottingAttribute
            A mapping of the high dimensional data samples to the attribute
        """
        title = None
        if func is None or func(attribute):
            # Use this single attribute for everything
            return PlottingAttribute(pd.Series(None, index=self.samples),
                                     title, (attribute,), order)
        else:
            try:
                # Check if this is a column in the data
                attribute = self.data[attribute]
            except (ValueError, KeyError):
                pass
            if isinstance(attribute, pd.Series):
                title = attribute.name
            order = sns.utils.categorical_order(attribute, order)
            palette = palette_maker(len(order))
            attribute = pd.Categorical(attribute, categories=order,
                                       ordered=True)
            return PlottingAttribute(pd.Series(attribute, index=self.samples),
                                     title, palette, order)

    def establish_symbols(self, marker, marker_order, text, text_order):
        """Figure out what symbol to put on the axes for each data point."""
        symbol_title = None
        if isinstance(text, bool):
            # Option 1: Text is a boolean
            if text:
                # 1a: text=True, so use the sample names of data as the
                # plotting symbol
                symbol_title = 'Samples'
                symbols = [str(x) for x in self.samples]
                symbol = pd.Series(self.samples, index=self.samples)
            else:
                # 1b: text=False, so use the specified marker for each sample.
                # BUGFIX: pass by keyword -- the original positional call
                # `(marker, marker_order, str)` put `marker_order` in the
                # `palette_maker` slot and the `str` type in the `order`
                # slot, making dict(zip(str, palette)) raise TypeError.
                symbol = self._maybe_make_grouper(marker, None,
                                                  order=marker_order,
                                                  func=str)
                if marker is not None:
                    try:
                        symbol_title = marker
                        symbol = self.data[marker]
                        # BUGFIX: categorical_order lives in sns.utils
                        symbols = sns.utils.categorical_order(symbol,
                                                              marker_order)
                    except (ValueError, KeyError):
                        # Marker is a single marker, or already a groupable
                        if marker in self.filled_markers:
                            # Single marker so make a tuple so it's indexable
                            symbols = (marker,)
                        else:
                            # already a groupable object
                            if isinstance(marker, pd.Series):
                                symbol_title = marker.name
                            n_symbols = len(self.plot_data.groupby(symbol))
                            if n_symbols > len(self.filled_markers):
                                # Too many categories: tile the marker list.
                                # BUGFIX: int() -- np.ceil returns a float
                                # and a list cannot be multiplied by one.
                                multiplier = int(np.ceil(
                                    n_symbols /
                                    float(len(self.filled_markers))))
                                filled_markers = \
                                    list(self.filled_markers) * multiplier
                                symbols = filled_markers[:n_symbols]
                            else:
                                symbols = self.filled_markers[:n_symbols]
                    symbol = PlottingAttribute(symbol, symbol_title, symbols,
                                               marker_order)
        else:
            # Assume "text" is a mapping from row names (sample ids) of the
            # data to text labels
            text_order = sns.utils.categorical_order(text, text_order)
            symbols = text_order
            symbol = pd.Series(pd.Categorical(text, categories=text_order,
                                              ordered=True),
                               index=self.samples)
            symbol = PlottingAttribute(symbol, symbol_title, symbols,
                                       text_order)
            if marker is not None:
                warnings.warn('Overriding plotting symbol from "marker" with '
                              'values in "text"')
            # Turn text into a boolean
            text = True

        self.symbol = symbol
        self.text = text

    def establish_symbol_attributes(self, linewidth, linewidth_order,
                                    edgecolor, edgecolor_order, alpha,
                                    alpha_order, size, size_order):
        """Build the per-sample groupers for the remaining visual cues."""
        self.edgecolor = self._maybe_make_grouper(
            edgecolor, self._edgecolor_palette, edgecolor_order,
            mpl.colors.is_color_like)
        self.linewidth = self._maybe_make_grouper(
            linewidth, self._linewidth_palette, linewidth_order, np.isfinite)
        self.alpha = self._maybe_make_grouper(
            alpha, self._alpha_palette, alpha_order, np.isfinite)
        self.size = self._maybe_make_grouper(
            size, self._size_palette, size_order, np.isfinite)

    def _edgecolor_palette(self, n_groups):
        # BUGFIX: was decorated @staticmethod while still taking `self`, so
        # the bound call in establish_symbol_attributes raised TypeError
        # (n_groups bound to `self`, no value for `n_groups`).
        return sns.color_palette('Greys', n_colors=n_groups)

    def _linewidth_palette(self, n_groups):
        return np.linspace(self.linewidth_min, self.linewidth_max, n_groups)

    def _alpha_palette(self, n_groups):
        return np.linspace(self.alpha_min, self.alpha_max, n_groups)

    def _size_palette(self, n_groups):
        return np.linspace(self.size_min, self.size_max, n_groups)

    def symbolplotter(self, xs, ys, ax, symbol, linewidth, edgecolor,
                      **kwargs):
        """Plots either a matplotlib marker or a string at each data position.

        Wraps plt.text and plt.plot.

        Parameters
        ----------
        xs : array-like
            List of x positions for data
        ys : array-like
            List of y-positions for data
        ax : matplotlib.axes.Axes
            Axes to draw on
        symbol : str
            What to plot at each (x, y) data position: a marker when
            ``self.text`` is False, otherwise a text label
        linewidth : float
            Marker edge width (marker mode only)
        edgecolor : color
            Marker edge color (marker mode only)
        kwargs
            Any other keyword arguments to plt.text or plt.plot
        """
        # If both the x- and y- positions don't have data, don't do anything
        if xs.empty and ys.empty:
            return
        if self.text:
            # Add dummy plot to make the axes in the right window
            ax.plot(xs, ys, color=None)
            # Plot each (x, y) position as text
            for x, y in zip(xs, ys):
                ax.text(x, y, symbol, **kwargs)
        else:
            # use plt.plot instead of plt.scatter for speed, since plotting
            # all the same marker shape and color and linestyle
            ax.plot(xs, ys, 'o', marker=symbol, markeredgewidth=linewidth,
                    markeredgecolor=edgecolor, **kwargs)

    def annotate_axes(self, ax):
        """Add descriptive labels to an Axes object."""
        if self.xlabel is not None:
            ax.set_xlabel(self.xlabel)
        if self.ylabel is not None:
            ax.set_ylabel(self.ylabel)

    def establish_legend_data(self):
        """Collect every sample's attribute-group labels into one frame."""
        self.legend_data = pd.DataFrame(dict(color=self.color.groupby,
                                             symbol=self.symbol.groupby,
                                             size=self.size.groupby,
                                             linewidth=self.linewidth.groupby,
                                             edgecolor=self.edgecolor.groupby,
                                             alpha=self.alpha.groupby),
                                        index=self.samples)
        self.legend_data = self.legend_data.reindex(columns=self.legend_order)

    def draw_symbols(self, ax, plot_kws):
        """Plot each sample in the data on *ax*."""
        plot_kws = {} if plot_kws is None else plot_kws

        # One plotting call per unique combination of visual attributes
        for labels, df in self.legend_data.groupby(self.legend_order):
            # Get the attributes in order, using the group label to get the
            # attribute
            for name, label in zip(self.legend_order, labels):
                plot_kws[name] = getattr(self, name)[label]
            # BUGFIX: the original call omitted `ax` (a required positional
            # argument of symbolplotter) and passed legend_data columns --
            # which hold attribute labels, not coordinates -- as x/y.  Look
            # up the actual (x, y) values for this group's samples instead.
            xy = self.plot_data.loc[df.index]
            self.symbolplotter(xy.iloc[:, 0], xy.iloc[:, 1], ax=ax,
                               **plot_kws)
class ScatterPlotter(PlotterMixin):
    """Scatterplot driver: wires the data and aesthetic mappings together."""

    def __init__(self, data, x, y, color, hue, hue_order, palette, marker,
                 marker_order, text, text_order, linewidth, linewidth_order,
                 edgecolor, edgecolor_order, alpha, alpha_order, size,
                 size_order):
        self.establish_data(data, x, y)
        self.establish_symbols(marker, marker_order, text, text_order)
        self.establish_symbol_attributes(linewidth, linewidth_order,
                                         edgecolor, edgecolor_order,
                                         alpha, alpha_order, size, size_order)
        self.establish_colors(color, hue, hue_order, palette)
        self.establish_legend_data()

    def establish_data(self, data, x, y):
        """Coerce *data* to a DataFrame and select the x/y columns by
        position."""
        if isinstance(data, pd.DataFrame):
            xlabel, ylabel = data.columns[x], data.columns[y]
        else:
            data = pd.DataFrame(data)
            xlabel = ylabel = None

        self.data = data
        self.plot_data = data.iloc[:, [x, y]]
        self.xlabel, self.ylabel = xlabel, ylabel
        self.samples = self.plot_data.index
        self.features = self.plot_data.columns
        self.n_samples = len(self.samples)
        self.n_features = len(self.features)

    def plot(self, ax, kwargs):
        """Draw the symbols and axis labels onto *ax*."""
        self.draw_symbols(ax, kwargs)
        self.annotate_axes(ax)
def scatterplot(data, x=0, y=1, color=None, hue=None, hue_order=None,
                palette=None, marker='o', marker_order=None, text=False,
                text_order=None, linewidth=1, linewidth_order=None,
                edgecolor='k', edgecolor_order=None, alpha=1, alpha_order=None,
                size=7, size_order=None, ax=None, **kwargs):
    """Draw a configurable scatterplot of two columns of *data* and return
    the axes it was drawn on (current axes when *ax* is None)."""
    plotter = ScatterPlotter(data, x, y, color, hue, hue_order, palette,
                             marker, marker_order, text, text_order,
                             linewidth, linewidth_order, edgecolor,
                             edgecolor_order, alpha, alpha_order, size,
                             size_order)
    ax = ax if ax is not None else plt.gca()
    plotter.plot(ax, kwargs)
    return ax
|
import os
from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from cacheops.simple import file_cache, FILE_CACHE_DIR
class Command(BaseCommand):
    help = 'Clean filebased cache'

    def handle(self, **options):
        """Delete every file-cache entry under FILE_CACHE_DIR.

        Invokes ``find`` with an argument list (``shell=False``) so the
        cache directory path is passed verbatim -- the previous
        ``os.system('find %s ...')`` broke on paths containing spaces or
        shell metacharacters and was an injection hazard.
        """
        import subprocess
        subprocess.run(
            ['find', FILE_CACHE_DIR, '-type', 'f',
             '!', '-iname', r'\.', '-mmin', '+0', '-delete'],
            check=False)  # mirror os.system: ignore find's exit status
|
"""Model tests
Unit tests for model utility methods.
"""
__author__ = 'jcgregorio@google.com (Joe Gregorio)'
import httplib2
import unittest
from apiclient.model import makepatch
# Table-driven cases for makepatch: each tuple is
# (message, original, modified, expected patch), where the patch maps
# changed keys to their new values and removed keys to None.
TEST_CASES = [
    # (message, original, modified, expected)
    ("Remove an item from an object",
     {'a': 1, 'b': 2}, {'a': 1}, {'b': None}),
    ("Add an item to an object",
     {'a': 1}, {'a': 1, 'b': 2}, {'b': 2}),
    ("No changes",
     {'a': 1, 'b': 2}, {'a': 1, 'b': 2}, {}),
    ("Empty objects",
     {}, {}, {}),
    ("Modify an item in an object",
     {'a': 1, 'b': 2}, {'a': 1, 'b': 3}, {'b': 3}),
    ("Change an array",
     {'a': 1, 'b': [2, 3]}, {'a': 1, 'b': [2]}, {'b': [2]}),
    ("Modify a nested item",
     {'a': 1, 'b': {'foo':'bar', 'baz': 'qux'}},
     {'a': 1, 'b': {'foo':'bar', 'baz': 'qaax'}},
     {'b': {'baz': 'qaax'}}),
    # NOTE: arrays are replaced wholesale in a patch, not diffed per element
    ("Modify a nested array",
     {'a': 1, 'b': [{'foo':'bar', 'baz': 'qux'}]},
     {'a': 1, 'b': [{'foo':'bar', 'baz': 'qaax'}]},
     {'b': [{'foo':'bar', 'baz': 'qaax'}]}),
    ("Remove item from a nested array",
     {'a': 1, 'b': [{'foo':'bar', 'baz': 'qux'}]},
     {'a': 1, 'b': [{'foo':'bar'}]},
     {'b': [{'foo':'bar'}]}),
    ("Remove a nested item",
     {'a': 1, 'b': {'foo':'bar', 'baz': 'qux'}},
     {'a': 1, 'b': {'foo':'bar'}},
     {'b': {'baz': None}})
]
class TestPatch(unittest.TestCase):

    def test_patch(self):
        """makepatch(original, modified) must equal the expected patch."""
        for message, original, modified, expected in TEST_CASES:
            patch = makepatch(original, modified)
            self.assertEqual(expected, patch, msg=message)
if __name__ == '__main__':
    # Run the test suite when executed directly.
    unittest.main()
|
"""
jinja2.filters
~~~~~~~~~~~~~~
Bundled jinja filters.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
import re
import math
from random import choice
from operator import itemgetter
from itertools import groupby
from jinja2.utils import Markup, escape, pformat, urlize, soft_unicode, \
unicode_urlencode
from jinja2.runtime import Undefined
from jinja2.exceptions import FilterArgumentError
from jinja2._compat import imap, string_types, text_type, iteritems
_word_re = re.compile(r'\w+(?u)')
def contextfilter(f):
    """Mark *f* as a context-dependent filter.  The current
    :class:`Context` will be passed as first argument.
    """
    f.contextfilter = True
    return f


def evalcontextfilter(f):
    """Mark *f* as an eval-context dependent filter.  An eval context
    object is passed as first argument.  For more information about the
    eval context, see :ref:`eval-context`.

    .. versionadded:: 2.4
    """
    f.evalcontextfilter = True
    return f


def environmentfilter(f):
    """Mark *f* as an environment-dependent filter.  The current
    :class:`Environment` is passed to the filter as first argument.
    """
    f.environmentfilter = True
    return f
def make_attrgetter(environment, attribute):
    """Build a callable that fetches *attribute* from an object using the
    lookup rules of *environment*.  Dots descend into sub-attributes, and
    integer path segments are looked up as integers.
    """
    # Fast path: a non-string or a simple (undotted, non-numeric) name is a
    # single direct lookup.
    if not isinstance(attribute, string_types) \
            or ('.' not in attribute and not attribute.isdigit()):
        return lambda x: environment.getitem(x, attribute)

    path = attribute.split('.')

    def attrgetter(item):
        # Walk the dotted path one segment at a time.
        for segment in path:
            if segment.isdigit():
                segment = int(segment)
            item = environment.getitem(item, segment)
        return item

    return attrgetter
def do_forceescape(value):
    """Enforce HTML escaping.  This will probably double escape variables."""
    # Unwrap a Markup-like object first so its text is escaped again.
    if hasattr(value, '__html__'):
        value = value.__html__()
    return escape(text_type(value))
def do_urlencode(value):
    """Escape strings for use in URLs (uses UTF-8 encoding).  Accepts
    dictionaries, plain strings, and pairwise iterables.

    .. versionadded:: 2.7
    """
    if isinstance(value, dict):
        pairs = iteritems(value)
    elif isinstance(value, string_types):
        pairs = None
    else:
        try:
            pairs = iter(value)
        except TypeError:
            pairs = None
    # Not a mapping or pair iterable: encode the value as a single string.
    if pairs is None:
        return unicode_urlencode(value)
    return u'&'.join(unicode_urlencode(k) + '=' + unicode_urlencode(v)
                     for k, v in pairs)
@evalcontextfilter
def do_replace(eval_ctx, s, old, new, count=None):
    """Return a copy of the value with all occurrences of a substring
    replaced with a new one. The first argument is the substring
    that should be replaced, the second is the replacement string.
    If the optional third argument ``count`` is given, only the first
    ``count`` occurrences are replaced:

    .. sourcecode:: jinja

        {{ "Hello World"|replace("Hello", "Goodbye") }}
            -> Goodbye World

        {{ "aaaaargh"|replace("a", "d'oh, ", 2) }}
            -> d'oh, d'oh, aaargh
    """
    if count is None:
        count = -1  # str.replace treats a negative count as "replace all"
    if not eval_ctx.autoescape:
        return text_type(s).replace(text_type(old), text_type(new), count)
    # Escape `s` when either operand carries markup but `s` itself does not.
    # BUGFIX: parenthesized explicitly -- `and` binds tighter than `or`, so
    # the original unparenthesized test read `A or (B and not C)`; it only
    # worked by accident because escape() is a no-op on Markup objects.
    if (hasattr(old, '__html__') or hasattr(new, '__html__')) and \
            not hasattr(s, '__html__'):
        s = escape(s)
    else:
        s = soft_unicode(s)
    return s.replace(soft_unicode(old), soft_unicode(new), count)
def do_upper(s):
    """Return the value converted to uppercase."""
    return soft_unicode(s).upper()


def do_lower(s):
    """Return the value converted to lowercase."""
    return soft_unicode(s).lower()
@evalcontextfilter
def do_xmlattr(_eval_ctx, d, autospace=True):
    """Create an SGML/XML attribute string based on the items in a dict.

    All values that are neither `none` nor `undefined` are automatically
    escaped:

    .. sourcecode:: html+jinja

        <ul{{ {'class': 'my_list', 'missing': none,
               'id': 'list-%d'|format(variable)}|xmlattr }}>
        ...
        </ul>

    A space is automatically prepended in front of the result when it is
    non-empty, unless *autospace* is false.
    """
    parts = []
    for key, value in iteritems(d):
        # Skip missing values entirely rather than emitting empty attributes
        if value is None or isinstance(value, Undefined):
            continue
        parts.append(u'%s="%s"' % (escape(key), escape(value)))
    rv = u' '.join(parts)
    if autospace and rv:
        rv = u' ' + rv
    if _eval_ctx.autoescape:
        rv = Markup(rv)
    return rv
def do_capitalize(s):
    """Capitalize a value. The first character will be uppercase, all others
    lowercase.
    """
    return soft_unicode(s).capitalize()


def do_title(s):
    """Return a titlecased version of the value. I.e. words will start with
    uppercase letters, all remaining characters are lowercase.
    """
    rv = []
    # Split on runs of hyphens/whitespace; the capturing group keeps the
    # separators so they are re-joined unchanged.  BUGFIX: the Unicode flag
    # is passed as an argument -- a global inline "(?u)" at the end of a
    # pattern raises on Python 3.11+.
    for item in re.compile(r'([-\s]+)', re.U).split(soft_unicode(s)):
        if not item:
            continue
        rv.append(item[0].upper() + item[1:].lower())
    return ''.join(rv)
def do_dictsort(value, case_sensitive=False, by='key'):
    """Sort a dict and yield (key, value) pairs.  Python dicts may be
    unsorted, so this orders them by either key or value:

    .. sourcecode:: jinja

        {% for item in mydict|dictsort %}
            sort the dict by key, case insensitive

        {% for item in mydict|dictsort(true) %}
            sort the dict by key, case sensitive

        {% for item in mydict|dictsort(false, 'value') %}
            sort the dict by value, case insensitive
    """
    try:
        pos = ('key', 'value').index(by)
    except ValueError:
        raise FilterArgumentError('You can only sort by either '
                                  '"key" or "value"')

    def sort_key(pair):
        item = pair[pos]
        # Lowercase strings for case-insensitive ordering
        if not case_sensitive and isinstance(item, string_types):
            item = item.lower()
        return item

    return sorted(value.items(), key=sort_key)
@environmentfilter
def do_sort(environment, value, reverse=False, case_sensitive=False,
            attribute=None):
    """Sort an iterable, ascending by default; pass ``reverse=true`` to
    invert the order.  String comparison is case-insensitive unless
    *case_sensitive* is true.

    .. sourcecode:: jinja

        {% for item in iterable|sort %}
            ...
        {% endfor %}

    It is also possible to sort by an attribute (for example to sort
    by the date of an object) by specifying the `attribute` parameter:

    .. sourcecode:: jinja

        {% for item in iterable|sort(attribute='date') %}
            ...
        {% endfor %}

    .. versionchanged:: 2.6
       The `attribute` parameter was added.
    """
    key_func = None
    if not case_sensitive:
        def key_func(item):
            return item.lower() if isinstance(item, string_types) else item
    if attribute is not None:
        getter = make_attrgetter(environment, attribute)
        # Compose: fetch the attribute first, then apply the case fold
        inner = key_func or (lambda x: x)

        def key_func(item, _inner=inner, _getter=getter):
            return _inner(_getter(item))
    return sorted(value, key=key_func, reverse=reverse)
def do_default(value, default_value=u'', boolean=False):
    """Return *default_value* when the value is undefined, otherwise the
    value itself:

    .. sourcecode:: jinja

        {{ my_variable|default('my_variable is not defined') }}

    To also substitute values that evaluate to false (empty string, 0, ...),
    set the second parameter to `true`:

    .. sourcecode:: jinja

        {{ ''|default('the string was empty', true) }}
    """
    missing = isinstance(value, Undefined)
    if missing or (boolean and not value):
        return default_value
    return value
@evalcontextfilter
def do_join(eval_ctx, value, d=u'', attribute=None):
    """Return a string which is the concatenation of the strings in the
    sequence. The separator between elements is an empty string per
    default, you can define it with the optional parameter:

    .. sourcecode:: jinja

        {{ [1, 2, 3]|join('|') }}
            -> 1|2|3

        {{ [1, 2, 3]|join }}
            -> 123

    It is also possible to join certain attributes of an object:

    .. sourcecode:: jinja

        {{ users|join(', ', attribute='username') }}

    .. versionadded:: 2.6
       The `attribute` parameter was added.
    """
    # Optionally join an attribute of each item instead of the item itself
    if attribute is not None:
        value = imap(make_attrgetter(eval_ctx.environment, attribute), value)

    # no automatic escaping? joining is a lot easier then
    if not eval_ctx.autoescape:
        return text_type(d).join(imap(text_type, value))

    # if the delimiter doesn't have an html representation we check
    # if any of the items has. If yes we do a coercion to Markup
    if not hasattr(d, '__html__'):
        value = list(value)
        do_escape = False
        for idx, item in enumerate(value):
            if hasattr(item, '__html__'):
                do_escape = True
            else:
                value[idx] = text_type(item)
        # Escape the delimiter only when at least one item was markup, so
        # plain joins stay plain strings
        if do_escape:
            d = escape(d)
        else:
            d = text_type(d)
        return d.join(value)

    # markup delimiter: soft_unicode keeps Markup-ness while joining
    return soft_unicode(d).join(imap(soft_unicode, value))
def do_center(value, width=80):
    """Center the value in a field of the given *width*."""
    return text_type(value).center(width)
@environmentfilter
def do_first(environment, seq):
"""Return the first item of a sequence."""
try:
return next(iter(seq))
except StopIteration:
return environment.undefined('No first item, sequence was empty.')
@environmentfilter
def do_last(environment, seq):
"""Return the last item of a sequence."""
try:
return next(iter(reversed(seq)))
except StopIteration:
return environment.undefined('No last item, sequence was empty.')
@environmentfilter
def do_random(environment, seq):
"""Return a random item from the sequence."""
try:
return choice(seq)
except IndexError:
return environment.undefined('No random item, sequence was empty.')
def do_filesizeformat(value, binary=False):
    """Format the value like a 'human-readable' file size (i.e. 13 kB,
    4.1 MB, 102 Bytes, etc).  Decimal prefixes (Mega, Giga, ...) are used by
    default; set *binary* to `True` for binary prefixes (Mebi, Gibi).
    """
    bytes = float(value)
    base = 1024 if binary else 1000
    prefixes = ['KiB' if binary else 'kB',
                'MiB' if binary else 'MB',
                'GiB' if binary else 'GB',
                'TiB' if binary else 'TB',
                'PiB' if binary else 'PB',
                'EiB' if binary else 'EB',
                'ZiB' if binary else 'ZB',
                'YiB' if binary else 'YB']
    if bytes == 1:
        return '1 Byte'
    if bytes < base:
        return '%d Bytes' % bytes
    # Find the first prefix whose unit exceeds the value; if none does,
    # fall through with the largest prefix.
    for i, prefix in enumerate(prefixes):
        unit = base ** (i + 2)
        if bytes < unit:
            break
    return '%.1f %s' % ((base * bytes / unit), prefix)
def do_pprint(value, verbose=False):
    """Pretty print a variable, useful for debugging.  A truthy *verbose*
    makes the output more verbose (this requires `pretty`).
    """
    return pformat(value, verbose=verbose)
@evalcontextfilter
def do_urlize(eval_ctx, value, trim_url_limit=None, nofollow=False,
              target=None):
    """Convert URLs in plain text into clickable links.

    An additional integer shortens the urls to that many characters; a
    third truthy argument adds ``rel="nofollow"``:

    .. sourcecode:: jinja

        {{ mytext|urlize(40, true) }}
            links are shortened to 40 chars and defined with rel="nofollow"

    If *target* is specified, the ``target`` attribute is added to the
    ``<a>`` tag:

    .. sourcecode:: jinja

       {{ mytext|urlize(40, target='_blank') }}

    .. versionchanged:: 2.8+
       The *target* parameter was added.
    """
    rv = urlize(value, trim_url_limit, nofollow, target)
    if eval_ctx.autoescape:
        return Markup(rv)
    return rv
def do_indent(s, width=4, indentfirst=False):
    """Return a copy of the string with each line indented by *width*
    spaces (default 4).  The first line is only indented when
    *indentfirst* is true:

    .. sourcecode:: jinja

        {{ mytext|indent(2, true) }}
            indent by two spaces and indent the first line too.
    """
    pad = u' ' * width
    rv = (u'\n' + pad).join(s.splitlines())
    return pad + rv if indentfirst else rv
def do_truncate(s, length=255, killwords=False, end='...'):
    """Return a truncated copy of the string, at most *length* characters
    (default ``255``).  With *killwords* true the text is cut exactly at
    length; otherwise the last (possibly partial) word is discarded.  A
    truncated result ends with *end* (``"..."`` by default).

    .. sourcecode:: jinja

        {{ "foo bar baz"|truncate(9) }}
            -> "foo ..."
        {{ "foo bar baz"|truncate(9, True) }}
            -> "foo ba..."
    """
    if len(s) <= length:
        return s
    cut = length - len(end)
    if killwords:
        return s[:cut] + end
    # Drop the trailing partial word, then restore a separator space when
    # there is room for one.
    trimmed = s[:cut].rsplit(' ', 1)[0]
    if len(trimmed) < length:
        trimmed += ' '
    return trimmed + end
@environmentfilter
def do_wordwrap(environment, s, width=79, break_long_words=True,
wrapstring=None):
"""
Return a copy of the string passed to the filter wrapped after
``79`` characters. You can override this default using the first
parameter. If you set the second parameter to `false` Jinja will not
split words apart if they are longer than `width`. By default, the newlines
will be the default newlines for the environment, but this can be changed
using the wrapstring keyword argument.
.. versionadded:: 2.7
Added support for the `wrapstring` parameter.
"""
if not wrapstring:
wrapstring = environment.newline_sequence
import textwrap
return wrapstring.join(textwrap.wrap(s, width=width, expand_tabs=False,
replace_whitespace=False,
break_long_words=break_long_words))
def do_wordcount(s):
    """Count the words in the given string."""
    return sum(1 for _ in _word_re.finditer(s))
def do_int(value, default=0):
    """Convert the value into an integer, falling back to *default*
    (``0`` unless overridden) when the conversion fails.
    """
    # Try a direct int() first; then via float() so "42.23"|int gives 42.
    for convert in (int, lambda v: int(float(v))):
        try:
            return convert(value)
        except (TypeError, ValueError):
            continue
    return default
def do_float(value, default=0.0):
    """Convert the value into a floating point number, falling back to
    *default* (``0.0`` unless overridden) when the conversion fails.
    """
    try:
        result = float(value)
    except (TypeError, ValueError):
        result = default
    return result
def do_format(value, *args, **kwargs):
    """Apply python string formatting on an object:

    .. sourcecode:: jinja

        {{ "%s - %s"|format("Hello?", "Foo!") }}
            -> Hello? - Foo!
    """
    # %-formatting takes either a tuple or a mapping, never both
    if args and kwargs:
        raise FilterArgumentError('can\'t handle positional and keyword '
                                  'arguments at the same time')
    return soft_unicode(value) % (kwargs or args)
def do_trim(value):
    """Strip leading and trailing whitespace."""
    return soft_unicode(value).strip()


def do_striptags(value):
    """Strip SGML/XML tags and replace adjacent whitespace by one space."""
    # Unwrap Markup-like objects before stripping their tags
    if hasattr(value, '__html__'):
        value = value.__html__()
    return Markup(text_type(value)).striptags()
def do_slice(value, slices, fill_with=None):
    """Slice an iterator into *slices* lists of items, e.g. to lay items
    out in columns:

    .. sourcecode:: html+jinja

        <div class="columwrapper">
          {%- for column in items|slice(3) %}
            <ul class="column-{{ loop.index }}">
            {%- for item in column %}
              <li>{{ item }}</li>
            {%- endfor %}
            </ul>
          {%- endfor %}
        </div>

    A second argument fills missing values on the last iteration.
    """
    seq = list(value)
    length = len(seq)
    # The first `extra` slices get one additional item each
    per_slice, extra = divmod(length, slices)
    offset = 0
    for index in range(slices):
        start = offset + index * per_slice
        if index < extra:
            offset += 1
        end = offset + (index + 1) * per_slice
        chunk = seq[start:end]
        if fill_with is not None and index >= extra:
            chunk.append(fill_with)
        yield chunk
def do_batch(value, linecount, fill_with=None):
    """Batch items into lists of at most *linecount* entries — the inverse
    of `slice`.  A second parameter fills up the final batch:

    .. sourcecode:: html+jinja

        <table>
        {%- for row in items|batch(3, '&nbsp;') %}
          <tr>
          {%- for column in row %}
            <td>{{ column }}</td>
          {%- endfor %}
          </tr>
        {%- endfor %}
        </table>
    """
    batch = []
    for item in value:
        batch.append(item)
        if len(batch) == linecount:
            yield batch
            batch = []
    # Emit the final, possibly short, batch — padded on request
    if batch:
        if fill_with is not None and len(batch) < linecount:
            batch.extend([fill_with] * (linecount - len(batch)))
        yield batch
def do_round(value, precision=0, method='common'):
    """Round the number to the given *precision* (default ``0``) using one
    of three methods:

    - ``'common'`` rounds either up or down
    - ``'ceil'`` always rounds up
    - ``'floor'`` always rounds down

    .. sourcecode:: jinja

        {{ 42.55|round }}
            -> 43.0
        {{ 42.55|round(1, 'floor') }}
            -> 42.5

    Even at 0 precision a float is returned; pipe through `int` for a real
    integer:

    .. sourcecode:: jinja

        {{ 42.55|round|int }}
            -> 43
    """
    if method not in ('common', 'ceil', 'floor'):
        raise FilterArgumentError('method must be common, ceil or floor')
    if method == 'common':
        return round(value, precision)
    # ceil/floor: shift, round, shift back
    scale = 10 ** precision
    return getattr(math, method)(value * scale) / scale
@environmentfilter
def do_groupby(environment, value, attribute):
"""Group a sequence of objects by a common attribute.
If you for example have a list of dicts or objects that represent persons
with `gender`, `first_name` and `last_name` attributes and you want to
group all users by genders you can do something like the following
snippet:
.. sourcecode:: html+jinja
<ul>
{% for group in persons|groupby('gender') %}
<li>{{ group.grouper }}<ul>
{% for person in group.list %}
<li>{{ person.first_name }} {{ person.last_name }}</li>
{% endfor %}</ul></li>
{% endfor %}
</ul>
Additionally it's possible to use tuple unpacking for the grouper and
list:
.. sourcecode:: html+jinja
<ul>
{% for grouper, list in persons|groupby('gender') %}
...
{% endfor %}
</ul>
As you can see the item we're grouping by is stored in the `grouper`
attribute and the `list` contains all the objects that have this grouper
in common.
.. versionchanged:: 2.6
It's now possible to use dotted notation to group by the child
attribute of another attribute.
"""
expr = make_attrgetter(environment, attribute)
return sorted(map(_GroupTuple, groupby(sorted(value, key=expr), expr)))
class _GroupTuple(tuple):
__slots__ = ()
grouper = property(itemgetter(0))
list = property(itemgetter(1))
def __new__(cls, xxx_todo_changeme):
(key, value) = xxx_todo_changeme
return tuple.__new__(cls, (key, list(value)))
@environmentfilter
def do_sum(environment, iterable, attribute=None, start=0):
"""Returns the sum of a sequence of numbers plus the value of parameter
'start' (which defaults to 0). When the sequence is empty it returns
start.
It is also possible to sum up only certain attributes:
.. sourcecode:: jinja
Total: {{ items|sum(attribute='price') }}
.. versionchanged:: 2.6
The `attribute` parameter was added to allow suming up over
attributes. Also the `start` parameter was moved on to the right.
"""
if attribute is not None:
iterable = imap(make_attrgetter(environment, attribute), iterable)
return sum(iterable, start)
def do_list(value):
    """Convert the value into a list; a string becomes a list of its
    characters.
    """
    return list(value)


def do_mark_safe(value):
    """Mark the value as safe: with automatic escaping enabled this
    variable will not be escaped.
    """
    return Markup(value)


def do_mark_unsafe(value):
    """Mark a value as unsafe.  This is the reverse operation for
    :func:`safe`."""
    return text_type(value)
def do_reverse(value):
    """Reverse the object, or return an iterator that walks over it the
    other way round.

    Strings are reversed eagerly; other sequences use :func:`reversed`
    when possible and fall back to copying into a list.
    """
    if isinstance(value, string_types):
        return value[::-1]
    try:
        return reversed(value)
    except TypeError:
        pass
    # No __reversed__/__len__ protocol available: materialize and reverse.
    try:
        items = list(value)
    except TypeError:
        raise FilterArgumentError('argument must be iterable')
    items.reverse()
    return items
@environmentfilter
def do_attr(environment, obj, name):
    """Get an attribute of an object. ``foo|attr("bar")`` works like
    ``foo.bar`` just that always an attribute is returned and items are not
    looked up.

    See :ref:`Notes on subscriptions <notes-on-subscriptions>` for more details.
    """
    # The attribute name must first be coercible to a native string; on
    # failure we fall through to the undefined object at the bottom.
    try:
        name = str(name)
    except UnicodeError:
        pass
    else:
        try:
            value = getattr(obj, name)
        except AttributeError:
            pass
        else:
            # In a sandboxed environment, unsafe attributes are replaced
            # with a special "unsafe undefined" object instead of leaking.
            if environment.sandboxed and not \
               environment.is_safe_attribute(obj, name, value):
                return environment.unsafe_undefined(obj, name)
            return value
    # Reached when coercion failed or the attribute does not exist.
    return environment.undefined(obj=obj, name=name)
@contextfilter
def do_map(*args, **kwargs):
    """Apply a filter on a sequence of objects, or look up an attribute.

    The basic usage is mapping on an attribute:

    .. sourcecode:: jinja

        Users on this page: {{ users|map(attribute='username')|join(', ') }}

    Alternatively, pass a filter name (and arguments) to apply it to every
    item:

    .. sourcecode:: jinja

        Users on this page: {{ titles|map('lower')|join(', ') }}

    .. versionadded:: 2.7
    """
    context, seq = args[0], args[1]
    if len(args) == 2 and 'attribute' in kwargs:
        # Attribute-lookup mode; no other keyword arguments are allowed.
        attribute = kwargs.pop('attribute')
        if kwargs:
            raise FilterArgumentError('Unexpected keyword argument %r' %
                                      next(iter(kwargs)))
        func = make_attrgetter(context.environment, attribute)
    else:
        # Filter mode: the third positional argument names the filter and
        # the remaining positional/keyword arguments are forwarded to it.
        try:
            name = args[2]
            args = args[3:]
        except LookupError:
            raise FilterArgumentError('map requires a filter argument')

        def func(item):
            return context.environment.call_filter(
                name, item, args, kwargs, context=context)
    if not seq:
        return
    for item in seq:
        yield func(item)
@contextfilter
def do_select(*args, **kwargs):
    """Filter a sequence of objects by applying a test to each one,
    keeping only the objects for which the test succeeds.

    Example usage:

    .. sourcecode:: jinja

        {{ numbers|select("odd") }}

    .. versionadded:: 2.7
    """
    # Keep items whose test result is truthy; no attribute lookup.
    return _select_or_reject(args, kwargs, lambda x: x, False)
@contextfilter
def do_reject(*args, **kwargs):
    """Filter a sequence of objects by applying a test to each one,
    dropping the objects for which the test succeeds.

    Example usage:

    .. sourcecode:: jinja

        {{ numbers|reject("odd") }}

    .. versionadded:: 2.7
    """
    # Keep items whose test result is falsy; no attribute lookup.
    return _select_or_reject(args, kwargs, lambda x: not x, False)
@contextfilter
def do_selectattr(*args, **kwargs):
    """Filter a sequence of objects by applying a test to an attribute of
    each object, keeping only the objects for which the test succeeds.

    Example usage:

    .. sourcecode:: jinja

        {{ users|selectattr("is_active") }}
        {{ users|selectattr("email", "none") }}

    .. versionadded:: 2.7
    """
    # Keep items whose attribute passes the test.
    return _select_or_reject(args, kwargs, lambda x: x, True)
@contextfilter
def do_rejectattr(*args, **kwargs):
    """Filter a sequence of objects by applying a test to an attribute of
    each object, dropping the objects for which the test succeeds.

    .. sourcecode:: jinja

        {{ users|rejectattr("is_active") }}
        {{ users|rejectattr("email", "none") }}

    .. versionadded:: 2.7
    """
    # Drop items whose attribute passes the test.
    return _select_or_reject(args, kwargs, lambda x: not x, True)
def _select_or_reject(args, kwargs, modfunc, lookup_attr):
    """Shared implementation for select/reject/selectattr/rejectattr.

    ``modfunc`` post-processes the test result (identity or negation);
    ``lookup_attr`` selects whether the test is applied to an attribute
    of each item rather than the item itself.
    """
    context, seq = args[0], args[1]
    if lookup_attr:
        # The attribute name is the first filter argument.
        try:
            attr = args[2]
        except LookupError:
            raise FilterArgumentError('Missing parameter for attribute name')
        transfunc = make_attrgetter(context.environment, attr)
        off = 1
    else:
        off = 0

        def transfunc(x):
            return x
    # An optional test name (plus its arguments) follows; without one the
    # plain truthiness of the (transformed) item is used.
    try:
        name = args[2 + off]
        args = args[3 + off:]
    except LookupError:
        func = bool
    else:
        def func(item):
            return context.environment.call_test(
                name, item, args, kwargs)
    if not seq:
        return
    for item in seq:
        if modfunc(func(transfunc(item))):
            yield item
#: Mapping of filter names (as used in templates) to their implementations.
#: Several names are aliases for the same callable ('e' -> escape,
#: 'd' -> do_default, 'count'/'length' -> len).
#: Fix: the literal previously listed 'title' and 'capitalize' twice;
#: the duplicate (identical) entries have been removed.
FILTERS = {
    'attr': do_attr,
    'replace': do_replace,
    'upper': do_upper,
    'lower': do_lower,
    'escape': escape,
    'e': escape,
    'forceescape': do_forceescape,
    'capitalize': do_capitalize,
    'title': do_title,
    'default': do_default,
    'd': do_default,
    'join': do_join,
    'count': len,
    'dictsort': do_dictsort,
    'sort': do_sort,
    'length': len,
    'reverse': do_reverse,
    'center': do_center,
    'indent': do_indent,
    'first': do_first,
    'last': do_last,
    'map': do_map,
    'random': do_random,
    'reject': do_reject,
    'rejectattr': do_rejectattr,
    'filesizeformat': do_filesizeformat,
    'pprint': do_pprint,
    'truncate': do_truncate,
    'wordwrap': do_wordwrap,
    'wordcount': do_wordcount,
    'int': do_int,
    'float': do_float,
    'string': soft_unicode,
    'list': do_list,
    'urlize': do_urlize,
    'format': do_format,
    'trim': do_trim,
    'striptags': do_striptags,
    'select': do_select,
    'selectattr': do_selectattr,
    'slice': do_slice,
    'batch': do_batch,
    'sum': do_sum,
    'abs': abs,
    'round': do_round,
    'groupby': do_groupby,
    'safe': do_mark_safe,
    'xmlattr': do_xmlattr,
    'urlencode': do_urlencode
}
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Rework ChoiceAnswer: drop the single ``option`` FK in favour of a
    many-to-many ``choices`` relation, and add an ``other`` free-text
    field plus a nullable ``question`` FK.
    """
    dependencies = [
        ('formbuilder', '0005_auto_20150826_1600'),
    ]
    operations = [
        # Drop the old single-selection foreign key.
        migrations.RemoveField(
            model_name='choiceanswer',
            name='option',
        ),
        # Answers may now reference several options at once.
        migrations.AddField(
            model_name='choiceanswer',
            name='choices',
            field=models.ManyToManyField(related_name='answers', to='formbuilder.Option'),
        ),
        # Optional free-text "other" answer.
        migrations.AddField(
            model_name='choiceanswer',
            name='other',
            field=models.TextField(blank=True),
        ),
        # Link each answer back to its question; nullable so existing rows
        # are not rejected during the schema change.
        migrations.AddField(
            model_name='choiceanswer',
            name='question',
            field=models.ForeignKey(related_name='answers', to='formbuilder.Choice', null=True),
        ),
    ]
|
"""
extend TiddlyWiki serialization to optionally use beta or
externalized releases and add the UniversalBackstage.
activated via "twrelease=beta" URL parameter or ServerSettings,
see build_config_var
"""
import logging
from tiddlyweb.util import read_utf8_file
from tiddlywebwiki.serialization import Serialization as WikiSerialization
from tiddlywebplugins.tiddlyspace.web import (determine_host,
determine_space, determine_space_recipe)
LOGGER = logging.getLogger(__name__)
def build_config_var(beta=False, external=False):
    """Create the configuration key which will be used to locate
    the base tiddlywiki file.

    The key is ``base_tiddlywiki`` plus an ``_external`` and/or ``_beta``
    suffix, in that order.
    """
    suffixes = []
    if external:
        suffixes.append('_external')
    if beta:
        suffixes.append('_beta')
    return 'base_tiddlywiki' + ''.join(suffixes)
class Serialization(WikiSerialization):
    """
    Subclass of the standard TiddlyWiki serialization to allow
    choosing beta or externalized versions of the base empty.html
    in which the tiddlers will be served.

    Also, if the TiddlyWiki is not being downloaded, add
    the UniversalBackstage by injecting a script tag.
    """
    def list_tiddlers(self, tiddlers):
        """
        Override tiddlers.link so the location in noscript is to
        /tiddlers.
        """
        http_host, _ = determine_host(self.environ)
        space_name = determine_space(self.environ, http_host)
        if space_name:
            recipe_name = determine_space_recipe(self.environ, space_name)
            # Only rewrite links that point at this space's recipe.
            if '/recipes/%s' % recipe_name in tiddlers.link:
                tiddlers.link = '/tiddlers'
        return WikiSerialization.list_tiddlers(self, tiddlers)
    def _get_wiki(self):
        """Load the base TiddlyWiki HTML, honoring twrelease/external/
        download query parameters, and inject the UniversalBackstage
        script unless the wiki is being downloaded."""
        beta = external = False
        # Query values arrive as lists; take the first entry, defaulting
        # to False when the parameter is absent.
        release = self.environ.get('tiddlyweb.query', {}).get(
            'twrelease', [False])[0]
        externalize = self.environ.get('tiddlyweb.query', {}).get(
            'external', [False])[0]
        download = self.environ.get('tiddlyweb.query', {}).get(
            'download', [False])[0]
        if release == 'beta':
            beta = True
        if externalize:
            external = True
        # If somebody is downloading, don't allow them to
        # externalize.
        if download:
            external = False
        wiki = None
        if beta or external:
            # Look up the configured path of the alternate base wiki.
            config_var = build_config_var(beta, external)
            LOGGER.debug('looking for %s', config_var)
            base_wiki_file = self.environ.get('tiddlyweb.config',
                {}).get(config_var, '')
            if base_wiki_file:
                LOGGER.debug('using %s as base_tiddlywiki', base_wiki_file)
                wiki = read_utf8_file(base_wiki_file)
        # Fall back to the standard base wiki when no alternate was found.
        if not wiki:
            wiki = WikiSerialization._get_wiki(self)
        tag = "<!--POST-SCRIPT-START-->"
        if not download:
            # Inject the UniversalBackstage script just before the marker.
            wiki = wiki.replace(tag, '<script type="text/javascript" '
                'src="/bags/common/tiddlers/backstage.js"></script> %s' % tag)
        return wiki
|
import tests.model_control.test_ozone_custom_models_enabled as testmod

# Build a single forecast model combination on the ozone dataset:
# RelativeDifference transform, ConstantTrend trend, NoCycle cycle
# component and an LSTM autoregression.
# (Removed the stray trailing semicolon and non-idiomatic spacing.)
testmod.build_model(['RelativeDifference'], ['ConstantTrend'], ['NoCycle'], ['LSTM'])
|
"""
Django settings for example_site project.
Generated by 'django-admin startproject' using Django 1.8.dev20150302062936.
For more information on this file, see
https://docs.djangoproject.com/en/dev/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/dev/ref/settings/
"""
import os
import environ
# Project root: two levels above this settings module.
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# django-environ reader; DATABASES below pulls DATABASE_URL from the env.
env = environ.Env()
# NOTE(review): hard-coded secret key -- acceptable only for this local
# example site; never deploy with this value.
SECRET_KEY = "fbaa1unu0e8z5@9mm%k#+*d@iny*=-)ma2b#ymq)o9z^3%ijh)"
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
INSTALLED_APPS = (
    "address",
    "person",
    "django.contrib.admin",
    "django.contrib.auth",
    "django.contrib.contenttypes",
    "django.contrib.sessions",
    "django.contrib.messages",
    "django.contrib.staticfiles",
)
MIDDLEWARE = (
    "django.middleware.security.SecurityMiddleware",
    "django.contrib.sessions.middleware.SessionMiddleware",
    "django.middleware.common.CommonMiddleware",
    "django.middleware.csrf.CsrfViewMiddleware",
    "django.contrib.auth.middleware.AuthenticationMiddleware",
    "django.contrib.messages.middleware.MessageMiddleware",
    "django.middleware.clickjacking.XFrameOptionsMiddleware",
)
ROOT_URLCONF = "example_site.urls"
TEMPLATES = [
    {
        "BACKEND": "django.template.backends.django.DjangoTemplates",
        "DIRS": [],
        "APP_DIRS": True,
        "OPTIONS": {
            "context_processors": [
                "django.template.context_processors.debug",
                "django.template.context_processors.request",
                "django.contrib.auth.context_processors.auth",
                "django.contrib.messages.context_processors.messages",
            ],
        },
    },
]
WSGI_APPLICATION = "example_site.wsgi.application"
GOOGLE_API_KEY = ""  # Specify your Google API key here
# Environment variable takes precedence over the hard-coded default above.
GOOGLE_API_KEY = os.environ.get("GOOGLE_API_KEY", GOOGLE_API_KEY)
DATABASES = {
    # Parsed from the DATABASE_URL environment variable by django-environ.
    "default": env.db(),
}
AUTH_PASSWORD_VALIDATORS = [
    {
        "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator",
    },
    {
        "NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",
    },
    {
        "NAME": "django.contrib.auth.password_validation.CommonPasswordValidator",
    },
    {
        "NAME": "django.contrib.auth.password_validation.NumericPasswordValidator",
    },
]
LANGUAGE_CODE = "en-us"
TIME_ZONE = "UTC"
USE_I18N = True
USE_L10N = True
USE_TZ = True
STATIC_URL = "/static/"
DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"
|
"""
Implements rotations, including spherical rotations as defined in WCS Paper II
[1]_
`RotateNative2Celestial` and `RotateCelestial2Native` follow the convention in
WCS Paper II to rotate to/from a native sphere and the celestial sphere.
The implementation uses `EulerAngleRotation`. The model parameters are
three angles: the longitude (``lon``) and latitude (``lat``) of the fiducial point
in the celestial system (``CRVAL`` keywords in FITS), and the longitude of the celestial
pole in the native system (``lon_pole``). The Euler angles are ``lon+90``, ``90-lat``
and ``-(lon_pole-90)``.
References
----------
.. [1] Calabretta, M.R., Greisen, E.W., 2002, A&A, 395, 1077 (Paper II)
"""
import math
import numpy as np
from .core import Model
from .parameters import Parameter
from astropy.coordinates.matrix_utilities import rotation_matrix, matrix_product
from astropy import units as u
from .utils import _to_radian, _to_orig_unit
__all__ = ['RotateCelestial2Native', 'RotateNative2Celestial', 'Rotation2D',
'EulerAngleRotation', 'RotationSequence3D', 'SphericalRotationSequence']
def _create_matrix(angles, axes_order):
    """Compose per-axis rotation matrices into one combined matrix.

    ``angles`` are in radians; ``axes_order`` is a sequence of 'x'/'y'/'z'
    labels matching ``angles``.  Matrices are multiplied in reverse order
    so the first listed rotation is applied first.
    """
    mats = []
    for ang, ax in zip(angles, axes_order):
        if isinstance(ang, u.Quantity):
            # Strip units, then reduce a 0-d array to a plain scalar.
            ang = ang.value
            ang = ang.item()
        mats.append(rotation_matrix(ang, ax, unit=u.rad))
    return matrix_product(*mats[::-1])
def spherical2cartesian(alpha, delta):
    """Convert spherical angles (longitude ``alpha``, latitude ``delta``,
    both in degrees) to a unit Cartesian vector [x, y, z]."""
    lon = np.deg2rad(alpha)
    lat = np.deg2rad(delta)
    cos_lat = np.cos(lat)
    return np.array([cos_lat * np.cos(lon),
                     cos_lat * np.sin(lon),
                     np.sin(lat)])
def cartesian2spherical(x, y, z):
    """Convert Cartesian coordinates to spherical angles (degrees).

    Returns ``(alpha, delta)`` -- longitude in (-180, 180] and latitude.
    """
    # Projection onto the x-y plane; basis for the latitude angle.
    r_xy = np.hypot(x, y)
    return np.rad2deg(np.arctan2(y, x)), np.rad2deg(np.arctan2(z, r_xy))
class RotationSequence3D(Model):
    """
    Perform a series of rotations about different axis in 3D space.

    Positive angles represent a counter-clockwise rotation.

    Parameters
    ----------
    angles : array-like
        Angles of rotation in deg in the order of axes_order.
    axes_order : str
        A sequence of 'x', 'y', 'z' corresponding to axis of rotation.

    Examples
    --------
    >>> model = RotationSequence3D([1.1, 2.1, 3.1, 4.1], axes_order='xyzx')
    """
    standard_broadcasting = False
    _separable = False
    n_inputs = 3
    n_outputs = 3
    # Stored internally in radians (setter); exposed in degrees (getter).
    angles = Parameter(default=[], getter=_to_orig_unit, setter=_to_radian)

    def __init__(self, angles, axes_order, name=None):
        self.axes = ['x', 'y', 'z']
        unrecognized = set(axes_order).difference(self.axes)
        if unrecognized:
            raise ValueError("Unrecognized axis label {0}; "
                             "should be one of {1} ".format(unrecognized,
                                                            self.axes))
        self.axes_order = axes_order
        if len(angles) != len(axes_order):
            # Fix: the previous message used a backslash line continuation
            # inside the string literal, embedding a raw newline and
            # indentation into the error text.
            raise ValueError("The number of angles {0} should match the "
                             "number of axes {1}.".format(len(angles),
                                                          len(axes_order)))
        super().__init__(angles, name=name)
        self._inputs = ('x', 'y', 'z')
        self._outputs = ('x', 'y', 'z')

    @property
    def inverse(self):
        """Inverse rotation: reversed axes order with negated angles."""
        angles = self.angles.value[::-1] * -1
        return self.__class__(angles, axes_order=self.axes_order[::-1])

    def evaluate(self, x, y, z, angles):
        """
        Apply the rotation to a set of 3D Cartesian coordinates.
        """
        # Fix: the original chained comparison
        # ``x.shape != y.shape != z.shape`` only raised when *both*
        # adjacent pairs differed, so e.g. x mismatching y while y matched
        # z went undetected.  Check all arrays against x explicitly.
        if x.shape != y.shape or x.shape != z.shape:
            raise ValueError("Expected input arrays to have the same shape")
        # Note: If the original shape was () (an array scalar) convert to a
        # 1-element 1-D array on output for consistency with most other models
        orig_shape = x.shape or (1,)
        inarr = np.array([x.flatten(), y.flatten(), z.flatten()])
        result = np.dot(_create_matrix(angles[0], self.axes_order), inarr)
        x, y, z = result[0], result[1], result[2]
        x.shape = y.shape = z.shape = orig_shape
        return x, y, z
class SphericalRotationSequence(RotationSequence3D):
    """
    Perform a sequence of rotations about arbitrary number of axes
    in spherical coordinates.

    Parameters
    ----------
    angles : list
        A sequence of angles (in deg).
    axes_order : str
        A sequence of characters ('x', 'y', or 'z') corresponding to the
        axis of rotation and matching the order in ``angles``.
    """
    def __init__(self, angles, axes_order, name=None, **kwargs):
        # Set before super().__init__ so the overriding n_inputs/n_outputs
        # properties below (2 instead of the base class's 3) are already
        # valid during base-class initialization -- presumably the Model
        # machinery reads them there; confirm against astropy internals.
        self._n_inputs = 2
        self._n_outputs = 2
        super().__init__(angles, axes_order=axes_order, name=name, **kwargs)
        self._inputs = ("lon", "lat")
        self._outputs = ("lon", "lat")
    @property
    def n_inputs(self):
        # Shadows the base-class attribute (3) with the spherical value (2).
        return self._n_inputs
    @property
    def n_outputs(self):
        return self._n_outputs
    def evaluate(self, lon, lat, angles):
        # Rotate via Cartesian space: sphere -> xyz -> rotate -> sphere.
        x, y, z = spherical2cartesian(lon, lat)
        x1, y1, z1 = super().evaluate(x, y, z, angles)
        lon, lat = cartesian2spherical(x1, y1, z1)
        return lon, lat
class _EulerRotation:
    """
    Base class which does the actual computation.
    """
    _separable = False
    def evaluate(self, alpha, delta, phi, theta, psi, axes_order):
        # 2-D inputs are flattened for the matrix multiply and reshaped
        # on output.
        shape = None
        if isinstance(alpha, np.ndarray) and alpha.ndim == 2:
            alpha = alpha.flatten()
            delta = delta.flatten()
            # NOTE(review): ``shape`` is captured *after* flattening, so
            # the reshape below restores the flattened (1-D) shape rather
            # than the original 2-D one -- looks suspicious; confirm the
            # intended behavior before changing it.
            shape = alpha.shape
        inp = spherical2cartesian(alpha, delta)
        matrix = _create_matrix([phi, theta, psi], axes_order)
        result = np.dot(matrix, inp)
        a, b = cartesian2spherical(*result)
        if shape is not None:
            a.shape = shape
            b.shape = shape
        return a, b
    # Unit-handling flags consumed by the astropy Model machinery.
    _input_units_strict = True
    _input_units_allow_dimensionless = True
    @property
    def input_units(self):
        """ Input units. """
        return {'alpha': u.deg, 'delta': u.deg}
    @property
    def return_units(self):
        """ Output units. """
        return {'alpha': u.deg, 'delta': u.deg}
class EulerAngleRotation(_EulerRotation, Model):
    """
    Implements Euler angle intrinsic rotations.

    Rotates one coordinate system into another (fixed) coordinate system.
    All coordinate systems are right-handed. The sign of the angles is
    determined by the right-hand rule.

    Parameters
    ----------
    phi, theta, psi : float or `~astropy.units.Quantity`
        "proper" Euler angles; if floats, they should be in deg.
    axes_order : str
        A 3 character string, a combination of 'x', 'y' and 'z',
        where each character denotes an axis in 3D space.
    """
    n_inputs = 2
    n_outputs = 2
    # Angles are stored in radians (setter) and exposed in degrees (getter).
    phi = Parameter(default=0, getter=_to_orig_unit, setter=_to_radian)
    theta = Parameter(default=0, getter=_to_orig_unit, setter=_to_radian)
    psi = Parameter(default=0, getter=_to_orig_unit, setter=_to_radian)

    def __init__(self, phi, theta, psi, axes_order, **kwargs):
        self.axes = ['x', 'y', 'z']
        if len(axes_order) != 3:
            raise TypeError(
                "Expected axes_order to be a character sequence of length 3,"
                "got {}".format(axes_order))
        unrecognized = set(axes_order).difference(self.axes)
        if unrecognized:
            raise ValueError("Unrecognized axis label {}; "
                             "should be one of {} ".format(unrecognized, self.axes))
        self.axes_order = axes_order
        # The three angles must be uniformly floats or uniformly Quantities.
        qs = [isinstance(par, u.Quantity) for par in [phi, theta, psi]]
        if any(qs) and not all(qs):
            raise TypeError("All parameters should be of the same type - float or Quantity.")
        super().__init__(phi=phi, theta=theta, psi=psi, **kwargs)
        self._inputs = ('alpha', 'delta')
        self._outputs = ('alpha', 'delta')

    @property
    def inverse(self):
        """Inverse rotation: swapped/negated angles, reversed axes order.

        Fix: every sibling rotation model exposes ``inverse`` as a
        property, but here the decorator was missing, so ``model.inverse``
        returned a bound method instead of the inverse model.
        """
        return self.__class__(phi=-self.psi,
                              theta=-self.theta,
                              psi=-self.phi,
                              axes_order=self.axes_order[::-1])

    def evaluate(self, alpha, delta, phi, theta, psi):
        """Rotate the spherical coordinates (alpha, delta) by the three
        Euler angles about ``self.axes_order``."""
        a, b = super().evaluate(alpha, delta, phi, theta, psi, self.axes_order)
        return a, b
class _SkyRotation(_EulerRotation, Model):
    """
    Base class for RotateNative2Celestial and RotateCelestial2Native.
    """
    # Fiducial point longitude/latitude and longitude of the pole; stored
    # in radians (setter), exposed in degrees (getter).
    lon = Parameter(default=0, getter=_to_orig_unit, setter=_to_radian)
    lat = Parameter(default=0, getter=_to_orig_unit, setter=_to_radian)
    lon_pole = Parameter(default=0, getter=_to_orig_unit, setter=_to_radian)

    def __init__(self, lon, lat, lon_pole, **kwargs):
        # Parameters must be uniformly floats or uniformly Quantities.
        qs = [isinstance(par, u.Quantity) for par in [lon, lat, lon_pole]]
        if any(qs) and not all(qs):
            raise TypeError("All parameters should be of the same type - float or Quantity.")
        super().__init__(lon, lat, lon_pole, **kwargs)
        # Sky rotations are always z-x-z Euler rotations (WCS Paper II).
        self.axes_order = 'zxz'

    def _evaluate(self, phi, theta, lon, lat, lon_pole):
        """Run the Euler rotation and normalize longitudes into [0, 360)."""
        alpha, delta = super().evaluate(phi, theta, lon, lat, lon_pole,
                                        self.axes_order)
        mask = alpha < 0
        if isinstance(mask, np.ndarray):
            alpha[mask] += 360
        elif mask:
            # Fix: the scalar branch previously executed ``alpha += 360``
            # unconditionally, shifting already-positive longitudes out of
            # range; only wrap when the longitude is actually negative.
            alpha += 360
        return alpha, delta
class RotateNative2Celestial(_SkyRotation):
    """
    Transform from Native to Celestial Spherical Coordinates.

    Parameters
    ----------
    lon : float or `~astropy.units.Quantity`
        Celestial longitude of the fiducial point.
    lat : float or `~astropy.units.Quantity`
        Celestial latitude of the fiducial point.
    lon_pole : float or `~astropy.units.Quantity`
        Longitude of the celestial pole in the native system.

    Notes
    -----
    If ``lon``, ``lat`` and ``lon_pole`` are numerical values they
    should be in units of deg. Inputs are angles on the native sphere.
    Outputs are angles on the celestial sphere.
    """
    n_inputs = 2
    n_outputs = 2
    @property
    def input_units(self):
        """ Input units. """
        return {'phi_N': u.deg, 'theta_N': u.deg}
    @property
    def return_units(self):
        """ Output units. """
        return {'alpha_C': u.deg, 'delta_C': u.deg}
    def __init__(self, lon, lat, lon_pole, **kwargs):
        super().__init__(lon, lat, lon_pole, **kwargs)
        # Inputs are angles on the native sphere; outputs on the celestial.
        self.inputs = ('phi_N', 'theta_N')
        self.outputs = ('alpha_C', 'delta_C')
    def evaluate(self, phi_N, theta_N, lon, lat, lon_pole):
        """
        Parameters
        ----------
        phi_N, theta_N : float (deg) or `~astropy.units.Quantity`
            Angles in the Native coordinate system.
        lon, lat, lon_pole : float (in deg) or `~astropy.units.Quantity`
            Parameter values when the model was initialized.

        Returns
        -------
        alpha_C, delta_C : float (deg) or `~astropy.units.Quantity`
            Angles on the Celestial sphere.
        """
        # The values are in radians since they have already been through the setter.
        if isinstance(lon, u.Quantity):
            lon = lon.value
            lat = lat.value
            lon_pole = lon_pole.value
        # Convert to Euler angles per WCS Paper II: (lon_pole-90, lat-90,
        # -(90+lon)) expressed in radians.
        phi = lon_pole - np.pi / 2
        theta = - (np.pi / 2 - lat)
        psi = -(np.pi / 2 + lon)
        alpha_C, delta_C = super()._evaluate(phi_N, theta_N, phi, theta, psi)
        return alpha_C, delta_C
    @property
    def inverse(self):
        # convert to angles on the celestial sphere
        return RotateCelestial2Native(self.lon, self.lat, self.lon_pole)
class RotateCelestial2Native(_SkyRotation):
    """
    Transform from Celestial to Native Spherical Coordinates.

    Parameters
    ----------
    lon : float or `~astropy.units.Quantity`
        Celestial longitude of the fiducial point.
    lat : float or `~astropy.units.Quantity`
        Celestial latitude of the fiducial point.
    lon_pole : float or `~astropy.units.Quantity`
        Longitude of the celestial pole in the native system.

    Notes
    -----
    If ``lon``, ``lat`` and ``lon_pole`` are numerical values they should be
    in units of deg. Inputs are angles on the celestial sphere.
    Outputs are angles on the native sphere.
    """
    n_inputs = 2
    n_outputs = 2
    @property
    def input_units(self):
        """ Input units. """
        return {'alpha_C': u.deg, 'delta_C': u.deg}
    @property
    def return_units(self):
        """ Output units. """
        return {'phi_N': u.deg, 'theta_N': u.deg}
    def __init__(self, lon, lat, lon_pole, **kwargs):
        super().__init__(lon, lat, lon_pole, **kwargs)
        # Inputs are angles on the celestial sphere
        self.inputs = ('alpha_C', 'delta_C')
        # Outputs are angles on the native sphere
        self.outputs = ('phi_N', 'theta_N')
    def evaluate(self, alpha_C, delta_C, lon, lat, lon_pole):
        """
        Parameters
        ----------
        alpha_C, delta_C : float (deg) or `~astropy.units.Quantity`
            Angles in the Celestial coordinate frame.
        lon, lat, lon_pole : float (deg) or `~astropy.units.Quantity`
            Parameter values when the model was initialized.

        Returns
        -------
        phi_N, theta_N : float (deg) or `~astropy.units.Quantity`
            Angles on the Native sphere.
        """
        # The parameter values are in radians (already through the setter).
        if isinstance(lon, u.Quantity):
            lon = lon.value
            lat = lat.value
            lon_pole = lon_pole.value
        # Convert to Euler angles: the inverse mapping of
        # RotateNative2Celestial.evaluate.
        phi = (np.pi / 2 + lon)
        theta = (np.pi / 2 - lat)
        psi = -(lon_pole - np.pi / 2)
        phi_N, theta_N = super()._evaluate(alpha_C, delta_C, phi, theta, psi)
        return phi_N, theta_N
    @property
    def inverse(self):
        return RotateNative2Celestial(self.lon, self.lat, self.lon_pole)
class Rotation2D(Model):
    """
    Perform a 2D rotation given an angle.

    Positive angles represent a counter-clockwise rotation and vice-versa.

    Parameters
    ----------
    angle : float or `~astropy.units.Quantity`
        Angle of rotation (if float it should be in deg).
    """
    n_inputs = 2
    n_outputs = 2
    _separable = False
    # Stored in radians internally; exposed in degrees via the getter.
    angle = Parameter(default=0.0, getter=_to_orig_unit, setter=_to_radian)
    # The default below binds the class-level Parameter descriptor, which
    # the base Model resolves to the parameter's default value (0.0).
    def __init__(self, angle=angle, **kwargs):
        super().__init__(angle=angle, **kwargs)
        self._inputs = ("x", "y")
        self._outputs = ("x", "y")
    @property
    def inverse(self):
        """Inverse rotation."""
        return self.__class__(angle=-self.angle)
    @classmethod
    def evaluate(cls, x, y, angle):
        """
        Rotate (x, y) about ``angle``.

        Parameters
        ----------
        x, y : ndarray-like
            Input quantities
        angle : float (deg) or `~astropy.units.Quantity`
            Angle of rotations.
        """
        if x.shape != y.shape:
            raise ValueError("Expected input arrays to have the same shape")
        # If one argument has units, enforce they both have units and they are compatible.
        x_unit = getattr(x, 'unit', None)
        y_unit = getattr(y, 'unit', None)
        has_units = x_unit is not None and y_unit is not None
        if x_unit != y_unit:
            if has_units and y_unit.is_equivalent(x_unit):
                y = y.to(x_unit)
                y_unit = x_unit
            else:
                # Covers both incompatible units and only-one-input-has-units.
                raise u.UnitsError("x and y must have compatible units")
        # Note: If the original shape was () (an array scalar) convert to a
        # 1-element 1-D array on output for consistency with most other models
        orig_shape = x.shape or (1,)
        inarr = np.array([x.flatten(), y.flatten()])
        if isinstance(angle, u.Quantity):
            angle = angle.to_value(u.rad)
        result = np.dot(cls._compute_matrix(angle), inarr)
        x, y = result[0], result[1]
        x.shape = y.shape = orig_shape
        if has_units:
            return u.Quantity(x, unit=x_unit), u.Quantity(y, unit=y_unit)
        else:
            return x, y
    @staticmethod
    def _compute_matrix(angle):
        # Standard 2D rotation matrix; ``angle`` is in radians here.
        return np.array([[math.cos(angle), -math.sin(angle)],
                        [math.sin(angle), math.cos(angle)]],
                        dtype=np.float64)
|
"""Fichier contenant le paramètre 'liste' de la commande 'matelot'."""
from primaires.format.fonctions import supprimer_accents
from primaires.format.tableau import Tableau
from primaires.interpreteur.masque.parametre import Parametre
from secondaires.navigation.equipage.postes.hierarchie import ORDRE
class PrmListe(Parametre):
    """Command 'matelot liste' -- lists the sailors of the player's crew.
    """
    def __init__(self):
        """Parameter constructor."""
        Parametre.__init__(self, "liste", "list")
        self.tronquer = True
        # Short and long help texts shown to (French-speaking) players.
        self.aide_courte = "liste les matelots de l'équipage"
        self.aide_longue = \
            "Cette commande liste les matelots de votre équipage. " \
            "Elle permet d'obtenir rapidement des informations pratiques " \
            "sur le nom du matelot ainsi que l'endroit où il se trouve."
    def interpreter(self, personnage, dic_masques):
        """Interpret the parameter."""
        # The character must be aboard a ship.
        salle = personnage.salle
        if not hasattr(salle, "navire"):
            personnage << "|err|Vous n'êtes pas sur un navire.|ff|"
            return
        navire = salle.navire
        equipage = navire.equipage
        # Only officers (or above) may inspect the crew roster.
        if not navire.a_le_droit(personnage, "officier"):
            personnage << "|err|Vous ne pouvez donner d'ordre sur ce " \
                    "navire.|ff|"
            return
        # Collect NPC sailors and player crew members, then sort by rank
        # (highest first) according to the ORDRE hierarchy.
        matelots = tuple((m, m.nom_poste) for m in \
                equipage.matelots.values())
        matelots += tuple(equipage.joueurs.items())
        matelots = sorted(matelots, \
                key=lambda couple: ORDRE.index(couple[1]), reverse=True)
        if len(matelots) == 0:
            personnage << "|err|Votre équipage ne comprend aucun matelot.|ff|"
            return
        # Render a three-column table: name, post, current location.
        tableau = Tableau()
        tableau.ajouter_colonne("Nom")
        tableau.ajouter_colonne("Poste")
        tableau.ajouter_colonne("Affectation")
        for matelot, nom_poste in matelots:
            nom = matelot.nom
            nom_poste = nom_poste.capitalize()
            titre = "Aucune"
            if hasattr(matelot, "personnage"):
                titre = matelot.personnage.salle.titre_court.capitalize()
            tableau.ajouter_ligne(nom, nom_poste, titre)
        personnage << tableau.afficher()
|
import time, copy
import os, os.path
import sys
import numpy
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from scipy import optimize
from echem_plate_ui import *
from echem_plate_math import *
import pickle
# Input pickled data files: two fast CPCV runs (p1, p2) and one
# illuminated CA run (pill).
p1='C:/Users/Gregoire/Documents/CaltechWork/echemdrop/2012-9_FeCoNiTi/results/echemplots/2012-9_FeCoNiTi_500C_fastCPCV_plate1_dlist_1066.dat'
p2='C:/Users/Gregoire/Documents/CaltechWork/echemdrop/2012-9_FeCoNiTi/results/echemplots/2012-9_FeCoNiTi_500C_fastCPCV_plate1_dlist_1662.dat'
pill='C:/Users/Gregoire/Documents/CaltechWork/echemdrop/2012-9_FeCoNiTi/results/echemplots/2012-9FeCoNiTi_500C_CAill_plate1_dlist_1164.dat'
os.chdir('C:/Users/Gregoire/Documents/CaltechWork/echemdrop/2012-9_FeCoNiTi/results/echemplots')

vshift = -.24           # potential shift (V) applied to all Ewe values
imult = 1.e6            # current multiplier: A -> microamps
cai0, cai1 = (0, 6500)  # index window for the CA time series


def _load_pickle(path):
    """Load one pickled data record, closing the file even if loading fails.

    Fix: the original opened each file and only closed it after a
    successful pickle.load, leaking the handle on error; a context
    manager guarantees closure.
    """
    with open(path, mode='r') as f:
        return pickle.load(f)


d1 = _load_pickle(p1)
d2 = _load_pickle(p2)
dill = _load_pickle(pill)
# Split each CV record into its two sweep segments.  NOTE(review): assumes
# 'segprops_dlist' always holds exactly two segments (up sweep first) --
# confirm with the upstream data writer.
segd1up, segd1dn=d1['segprops_dlist']
# Up sweep: drop the first 4 points (presumably settling transients --
# confirm) and shift potentials by vshift.
i1up=d1['I(A)'][segd1up['inds']][4:]
# Linear background = raw current minus the background-subtracted trace.
lin1up=i1up-d1['I(A)_LinSub'][segd1up['inds']][4:]
v1up=d1['Ewe(V)'][segd1up['inds']][4:]+vshift
i1dn=d1['I(A)'][segd1dn['inds']]
v1dn=d1['Ewe(V)'][segd1dn['inds']]+vshift
# Convert currents from A to microamps in place.
i1up*=imult
i1dn*=imult
lin1up*=imult
# Same treatment for the second sample.
segd2up, segd2dn=d2['segprops_dlist']
i2up=d2['I(A)'][segd2up['inds']][4:]
lin2up=i2up-d2['I(A)_LinSub'][segd2up['inds']][4:]
v2up=d2['Ewe(V)'][segd2up['inds']][4:]+vshift
i2dn=d2['I(A)'][segd2dn['inds']]
v2dn=d2['Ewe(V)'][segd2dn['inds']]+vshift
i2up*=imult
i2dn*=imult
lin2up*=imult
# Illuminated CA data: smoothed current, illumination-difference current
# and time axis, restricted to the [cai0, cai1) index window.
ica=dill['I(A)_SG'][cai0:cai1]*imult
icadiff=dill['Idiff_time'][cai0:cai1]*imult
tca=dill['t(s)'][cai0:cai1]
# Per-illumination-cycle photocurrent, limited to the plotted time range.
tca_cycs=dill['till_cycs']
cycinds=numpy.where((tca_cycs>=tca.min())&(tca_cycs<=tca.max()))[0]
tca_cycs=tca_cycs[cycinds]
iphoto_cycs=dill['Photocurrent_cycs(A)'][cycinds]*imult
# NOTE(review): ``pylab`` is never imported in this file; presumably it is
# pulled in via the star imports above (echem_plate_ui / echem_plate_math)
# -- confirm.
pylab.rc('font', family='serif', serif='Times New Roman', size=11)
fig=pylab.figure(figsize=(3.5, 4.5))
# Top axes: CV curves; bottom axes: CA trace with a twinned right-hand
# axis for the per-cycle photocurrent.
ax1=fig.add_axes((.2, .6, .74, .35))
ax2=fig.add_axes((.2, .11, .6, .35))
ax3=ax2.twinx()
# Sample 1 (green) and sample 2 (blue): solid = up sweep,
# dotted = linear background, dashed = down sweep.
ax1.plot(v1up, i1up, 'g-', linewidth=1.)
ax1.plot(v1up, lin1up, 'g:', linewidth=1.)
ax1.plot(v1dn, i1dn, 'g--', linewidth=1.)
ax1.plot(v2up, i2up, 'b-', linewidth=1.)
ax1.plot(v2up, lin2up, 'b:', linewidth=1.)
ax1.plot(v2dn, i2dn, 'b--', linewidth=1.)
ax1.set_xlim((-.1, .62))
ax1.set_ylim((-40, 130))
ax1.set_xlabel('Potential (V vs H$_2$O/O$_2$)', fontsize=12)
ax1.set_ylabel('Current ($\mu$A)', fontsize=12)
# CA trace (black), illumination-difference trace (dashed blue) and
# photocurrent per cycle (red circles, right-hand axis).
ax2.plot(tca, ica, 'k-')
ax2.plot(tca, icadiff, 'b--', linewidth=2)
ax2.set_xlim((0, 6.5))
ax2.set_ylim((0, 0.4))
ax3.plot(tca_cycs, iphoto_cycs, 'ro-')
ax3.set_ylim((0, 0.1))
ax2.set_xlabel('Elapsed time (s)', fontsize=12)
ax2.set_ylabel('Current ($\mu$A)', fontsize=12)
ax3.set_ylabel('Photocurrent ($\mu$A)', fontsize=12)
pylab.show()
# Print element/composition labels for each sample (Python 2 print
# statements -- this script targets Python 2).
print ''.join(['%s%.3f' %tup for tup in zip(dill['elements'], dill['compositions'])])
print ''.join(['%s%.3f' %tup for tup in zip(d1['elements'], d1['compositions'])])
print ''.join(['%s%.3f' %tup for tup in zip(d2['elements'], d2['compositions'])])
|
""" Documentation package """
import neuroptikon
import wx, wx.html
import os.path, sys, urllib
_sharedFrame = None
def baseURL():
    """Return the ``file:`` URL of the documentation root, with a trailing
    slash appended."""
    # Built docs live deeper in the tree when running from a source checkout.
    if neuroptikon.runningFromSource:
        parts = (neuroptikon.rootDir, 'documentation', 'build', 'Documentation')
    else:
        parts = (neuroptikon.rootDir, 'documentation')
    basePath = os.path.join(*parts)
    return 'file:' + urllib.pathname2url(basePath) + '/'
def showPage(page):
    """Display *page* (a path relative to ``baseURL()``) in a help browser.

    Prefers the embedded WebKit-based browser; falls back to the user's
    default external browser when the embedded frame is unavailable.
    """
    pageURL = baseURL() + page
    # Try to open an embedded WebKit-based help browser.
    try:
        import documentation_frame
        documentation_frame.showPage(pageURL)
    except Exception:
        # Deliberate best-effort fallback for any failure (missing WebKit,
        # import error, ...).  Fix: the original bare ``except:`` also
        # swallowed KeyboardInterrupt/SystemExit.
        wx.LaunchDefaultBrowser(pageURL)
|
from __future__ import absolute_import, unicode_literals
import logging
import os
from haas.plugins.discoverer import match_path
from haas.plugins.i_discoverer_plugin import IDiscovererPlugin
from .yaml_test_loader import YamlTestLoader
logger = logging.getLogger(__name__)
class RestTestDiscoverer(IDiscovererPlugin):
"""A ``haas`` test discovery plugin to generate Web API test cases from
YAML descriptions.
Parameters
----------
loader : haas.loader.Loader
The ``haas`` test loader.
"""
def __init__(self, loader, **kwargs):
super(RestTestDiscoverer, self).__init__(**kwargs)
self._loader = loader
self._yaml_loader = YamlTestLoader(loader)
@classmethod
def from_args(cls, args, arg_prefix, loader):
"""Construct the discoverer from parsed command line arguments.
Parameters
----------
args : argparse.Namespace
The ``argparse.Namespace`` containing parsed arguments.
arg_prefix : str
The prefix used for arguments beloning solely to this plugin.
loader : haas.loader.Loader
The test loader used to construct TestCase and TestSuite instances.
"""
return cls(loader)
@classmethod
def add_parser_arguments(cls, parser, option_prefix, dest_prefix):
"""Add options for the plugin to the main argument parser.
Parameters
----------
parser : argparse.ArgumentParser
The parser to extend
option_prefix : str
The prefix that option strings added by this plugin should use.
dest_prefix : str
The prefix that ``dest`` strings for options added by this
plugin should use.
"""
def discover(self, start, top_level_directory=None, pattern=None):
"""Discover YAML-formatted Web API tests.
Parameters
----------
start : str
Directory from which to recursively discover test cases.
top_level_directory : None
Ignored; for API compatibility with haas.
pattern : None
Ignored; for API compatibility with haas.
"""
if os.path.isdir(start):
start_directory = start
return self._discover_by_directory(start_directory)
elif os.path.isfile(start):
start_filepath = start
return self._discover_by_file(start_filepath)
return self._loader.create_suite()
def _discover_by_directory(self, start_directory):
"""Run test discovery in a directory.
Parameters
----------
start_directory : str
The package directory in which to start test discovery.
"""
start_directory = os.path.abspath(start_directory)
tests = self._discover_tests(start_directory)
return self._loader.create_suite(list(tests))
    def _discover_by_file(self, start_filepath):
        """Run test discovery on a single file.
        Parameters
        ----------
        start_filepath : str
            The module file in which to start test discovery.
        """
        # Normalise to an absolute path so logs and loaders agree on location.
        start_filepath = os.path.abspath(start_filepath)
        logger.debug('Discovering tests in file: start_filepath=%r',
                     start_filepath)
        tests = self._load_from_file(start_filepath)
        return self._loader.create_suite(list(tests))
    def _load_from_file(self, filepath):
        """Load the YAML test cases in ``filepath`` and wrap them in a suite."""
        logger.debug('Loading tests from %r', filepath)
        tests = self._yaml_loader.load_tests_from_file(filepath)
        return self._loader.create_suite(tests)
    def _discover_tests(self, start_directory):
        """Walk ``start_directory``, yielding a suite per ``test*.yml`` file."""
        pattern = 'test*.yml'
        for curdir, dirnames, filenames in os.walk(start_directory):
            logger.debug('Discovering tests in %r', curdir)
            for filename in filenames:
                filepath = os.path.join(curdir, filename)
                # match_path filters on the filename glob; skip non-tests.
                if not match_path(filename, filepath, pattern):
                    logger.debug('Skipping %r', filepath)
                    continue
                yield self._load_from_file(filepath)
|
from __future__ import unicode_literals
from django.forms import ValidationError
from django.core.exceptions import NON_FIELD_ERRORS
from django.forms.formsets import TOTAL_FORM_COUNT
from django.forms.models import (
BaseModelFormSet, modelformset_factory,
ModelForm, _get_foreign_key, ModelFormMetaclass, ModelFormOptions
)
from django.db.models.fields.related import ForeignObjectRel
from modelcluster.models import get_all_child_relations
class BaseTransientModelFormSet(BaseModelFormSet):
    """ A ModelFormSet that doesn't assume that all its initial data instances exist in the db """
    def _construct_form(self, i, **kwargs):
        # Need to override _construct_form to avoid calling to_python on an empty string PK value
        if self.is_bound and i < self.initial_form_count():
            pk_key = "%s-%s" % (self.add_prefix(i), self.model._meta.pk.name)
            pk = self.data[pk_key]
            if pk == '':
                # Blank PK: the instance was never saved, so use a fresh one
                # instead of looking it up (which would fail on to_python('')).
                kwargs['instance'] = self.model()
            else:
                pk_field = self.model._meta.pk
                to_python = self._get_to_python(pk_field)
                pk = to_python(pk)
                kwargs['instance'] = self._existing_object(pk)
        if i < self.initial_form_count() and 'instance' not in kwargs:
            kwargs['instance'] = self.get_queryset()[i]
        if i >= self.initial_form_count() and self.initial_extra:
            # Set initial values for extra forms
            try:
                kwargs['initial'] = self.initial_extra[i - self.initial_form_count()]
            except IndexError:
                pass
        # bypass BaseModelFormSet's own _construct_form
        return super(BaseModelFormSet, self)._construct_form(i, **kwargs)
    def save_existing_objects(self, commit=True):
        # Need to override save_existing_objects so that it doesn't skip over initial forms whose
        # instance has a blank PK (which is taken as an indication that the form was constructed
        # with an instance not present in our queryset)
        self.changed_objects = []
        self.deleted_objects = []
        if not self.initial_forms:
            return []
        saved_instances = []
        forms_to_delete = self.deleted_forms
        for form in self.initial_forms:
            obj = form.instance
            if form in forms_to_delete:
                if obj.pk is None:
                    # no action to be taken to delete an object which isn't in the database
                    continue
                self.deleted_objects.append(obj)
                self.delete_existing(obj, commit=commit)
            elif form.has_changed():
                self.changed_objects.append((obj, form.changed_data))
                saved_instances.append(self.save_existing(form, obj, commit=commit))
                if not commit:
                    self.saved_forms.append(form)
        return saved_instances
def transientmodelformset_factory(model, formset=BaseTransientModelFormSet, **kwargs):
    """Like ``modelformset_factory`` but defaulting to
    BaseTransientModelFormSet, so initial forms may reference unsaved
    instances."""
    factory_kwargs = dict(kwargs, formset=formset)
    return modelformset_factory(model, **factory_kwargs)
class BaseChildFormSet(BaseTransientModelFormSet):
    """Formset for editing child objects attached to a parent instance via a
    modelcluster relation; saving updates the relation's manager in memory and
    only writes to the database when ``commit=True``."""
    def __init__(self, data=None, files=None, instance=None, queryset=None, **kwargs):
        # With no parent given, work against a fresh (unsaved) parent instance.
        if instance is None:
            self.instance = self.fk.remote_field.model()
        else:
            self.instance = instance
        # Accessor name of the reverse relation from the parent to the children.
        self.rel_name = ForeignObjectRel(self.fk, self.fk.remote_field.model, related_name=self.fk.remote_field.related_name).get_accessor_name()
        if queryset is None:
            queryset = getattr(self.instance, self.rel_name).all()
        super(BaseChildFormSet, self).__init__(data, files, queryset=queryset, **kwargs)
    def save(self, commit=True):
        # The base ModelFormSet's save(commit=False) will populate the lists
        # self.changed_objects, self.deleted_objects and self.new_objects;
        # use these to perform the appropriate updates on the relation's manager.
        saved_instances = super(BaseChildFormSet, self).save(commit=False)
        manager = getattr(self.instance, self.rel_name)
        # if model has a sort_order_field defined, assign order indexes to the attribute
        # named in it
        if self.can_order and hasattr(self.model, 'sort_order_field'):
            sort_order_field = getattr(self.model, 'sort_order_field')
            for i, form in enumerate(self.ordered_forms):
                setattr(form.instance, sort_order_field, i)
        # If the manager has existing instances with a blank ID, we have no way of knowing
        # whether these correspond to items in the submitted data. We'll assume that they do,
        # as that's the most common case (i.e. the formset contains the full set of child objects,
        # not just a selection of additions / updates) and so we delete all ID-less objects here
        # on the basis that they will be re-added by the formset saving mechanism.
        no_id_instances = [obj for obj in manager.all() if obj.pk is None]
        if no_id_instances:
            manager.remove(*no_id_instances)
        manager.add(*saved_instances)
        manager.remove(*self.deleted_objects)
        self.save_m2m()  # ensures any parental-m2m fields are saved.
        if commit:
            manager.commit()
        return saved_instances
    def clean(self, *args, **kwargs):
        # Run the unique/unique_together checks in addition to base cleaning.
        self.validate_unique()
        return super(BaseChildFormSet, self).clean(*args, **kwargs)
    def validate_unique(self):
        '''This clean method will check for unique_together condition'''
        # Collect unique_checks and to run from all the forms.
        all_unique_checks = set()
        all_date_checks = set()
        forms_to_delete = self.deleted_forms
        valid_forms = [form for form in self.forms if form.is_valid() and form not in forms_to_delete]
        for form in valid_forms:
            unique_checks, date_checks = form.instance._get_unique_checks()
            all_unique_checks.update(unique_checks)
            all_date_checks.update(date_checks)
        errors = []
        # Do each of the unique checks (unique and unique_together)
        for uclass, unique_check in all_unique_checks:
            seen_data = set()
            for form in valid_forms:
                # Get the data for the set of fields that must be unique among the forms.
                row_data = (
                    field if field in self.unique_fields else form.cleaned_data[field]
                    for field in unique_check if field in form.cleaned_data
                )
                # Reduce Model instances to their primary key values
                row_data = tuple(d._get_pk_val() if hasattr(d, '_get_pk_val') else d
                                 for d in row_data)
                if row_data and None not in row_data:
                    # if we've already seen it then we have a uniqueness failure
                    if row_data in seen_data:
                        # poke error messages into the right places and mark
                        # the form as invalid
                        errors.append(self.get_unique_error_message(unique_check))
                        form._errors[NON_FIELD_ERRORS] = self.error_class([self.get_form_error()])
                        # remove the data from the cleaned_data dict since it was invalid
                        for field in unique_check:
                            if field in form.cleaned_data:
                                del form.cleaned_data[field]
                    # mark the data as seen
                    seen_data.add(row_data)
        if errors:
            raise ValidationError(errors)
def childformset_factory(
    parent_model, model, form=ModelForm,
    formset=BaseChildFormSet, fk_name=None, fields=None, exclude=None,
    extra=3, can_order=False, can_delete=True, max_num=None, validate_max=False,
    formfield_callback=None, widgets=None, min_num=None, validate_min=False
):
    """Build a formset class for editing ``model`` children of ``parent_model``.

    The foreign key from child to parent is resolved (optionally via
    ``fk_name``) and always excluded from the generated forms; ordering is
    enabled automatically when the child model declares a
    ``sort_order_field``. Returns the formset class with a ``fk`` attribute
    attached (read by BaseChildFormSet.__init__).
    """
    fk = _get_foreign_key(parent_model, model, fk_name=fk_name)
    # enforce a max_num=1 when the foreign key to the parent model is unique.
    if fk.unique:
        max_num = 1
        validate_max = True
    # Build a fresh exclusion list rather than appending to the caller's list:
    # the previous `exclude += [fk.name]` mutated the argument in place, so a
    # list shared between several factory calls accumulated FK names.
    if exclude is None:
        exclude = []
    exclude = list(exclude) + [fk.name]
    kwargs = {
        'form': form,
        'formfield_callback': formfield_callback,
        'formset': formset,
        'extra': extra,
        'can_delete': can_delete,
        # if the model supplies a sort_order_field, enable ordering regardless of
        # the current setting of can_order
        'can_order': (can_order or hasattr(model, 'sort_order_field')),
        'fields': fields,
        'exclude': exclude,
        'max_num': max_num,
        'validate_max': validate_max,
        'widgets': widgets,
        'min_num': min_num,
        'validate_min': validate_min,
    }
    FormSet = transientmodelformset_factory(model, **kwargs)
    FormSet.fk = fk
    return FormSet
class ClusterFormOptions(ModelFormOptions):
    """ModelFormOptions extended with the formset configuration attributes
    read from a ClusterForm's ``Meta`` (``formsets`` / ``exclude_formsets``)."""
    def __init__(self, options=None):
        super(ClusterFormOptions, self).__init__(options=options)
        for attr in ('formsets', 'exclude_formsets'):
            setattr(self, attr, getattr(options, attr, None))
class ClusterFormMetaclass(ModelFormMetaclass):
    """Metaclass that, for each child relation of the form's model, builds a
    child formset class and collects them in ``new_class.formsets``."""
    # Default number of extra blank forms per child formset.
    extra_form_count = 3
    @classmethod
    def child_form(cls):
        # Hook so subclasses can substitute their own form class for children.
        return ClusterForm
    def __new__(cls, name, bases, attrs):
        try:
            parents = [b for b in bases if issubclass(b, ClusterForm)]
        except NameError:
            # We are defining ClusterForm itself.
            parents = None
        # grab any formfield_callback that happens to be defined in attrs -
        # so that we can pass it on to child formsets - before ModelFormMetaclass deletes it.
        # BAD METACLASS NO BISCUIT.
        formfield_callback = attrs.get('formfield_callback')
        new_class = super(ClusterFormMetaclass, cls).__new__(cls, name, bases, attrs)
        if not parents:
            return new_class
        # ModelFormMetaclass will have set up new_class._meta as a ModelFormOptions instance;
        # replace that with ClusterFormOptions so that we can access _meta.formsets
        opts = new_class._meta = ClusterFormOptions(getattr(new_class, 'Meta', None))
        if opts.model:
            formsets = {}
            for rel in get_all_child_relations(opts.model):
                # to build a childformset class from this relation, we need to specify:
                # - the base model (opts.model)
                # - the child model (rel.field.model)
                # - the fk_name from the child model to the base (rel.field.name)
                rel_name = rel.get_accessor_name()
                # apply 'formsets' and 'exclude_formsets' rules from meta
                if opts.formsets is not None and rel_name not in opts.formsets:
                    continue
                if opts.exclude_formsets and rel_name in opts.exclude_formsets:
                    continue
                try:
                    widgets = opts.widgets.get(rel_name)
                except AttributeError:  # thrown if opts.widgets is None
                    widgets = None
                kwargs = {
                    'extra': cls.extra_form_count,
                    'form': cls.child_form(),
                    'formfield_callback': formfield_callback,
                    'fk_name': rel.field.name,
                    'widgets': widgets
                }
                # see if opts.formsets looks like a dict; if so, allow the value
                # to override kwargs
                try:
                    kwargs.update(opts.formsets.get(rel_name))
                except AttributeError:
                    pass
                formset = childformset_factory(opts.model, rel.field.model, **kwargs)
                formsets[rel_name] = formset
            new_class.formsets = formsets
        # Remember whether Meta named formsets explicitly; ClusterForm.__init__
        # uses this to decide whether omitted formsets are acceptable.
        new_class._has_explicit_formsets = (opts.formsets is not None or opts.exclude_formsets is not None)
        return new_class
class ClusterForm(ModelForm, metaclass=ClusterFormMetaclass):
    """ModelForm that also instantiates, validates and saves one child formset
    per child relation (classes built by ClusterFormMetaclass)."""
    def __init__(self, data=None, files=None, instance=None, prefix=None, **kwargs):
        super(ClusterForm, self).__init__(data, files, instance=instance, prefix=prefix, **kwargs)
        self.formsets = {}
        # Instantiate one formset per child relation, namespacing each by the
        # form's prefix so multiple ClusterForms can share a POST.
        for rel_name, formset_class in self.__class__.formsets.items():
            if prefix:
                formset_prefix = "%s-%s" % (prefix, rel_name)
            else:
                formset_prefix = rel_name
            self.formsets[rel_name] = formset_class(data, files, instance=instance, prefix=formset_prefix)
        if self.is_bound and not self._has_explicit_formsets:
            # check which formsets have actually been provided as part of the form submission -
            # if no `formsets` or `exclude_formsets` was specified, we allow them to be omitted
            # (https://github.com/wagtail/wagtail/issues/5414#issuecomment-567468127).
            self._posted_formsets = [
                formset
                for formset in self.formsets.values()
                if '%s-%s' % (formset.prefix, TOTAL_FORM_COUNT) in self.data
            ]
        else:
            # expect all defined formsets to be part of the post
            self._posted_formsets = self.formsets.values()
    def as_p(self):
        """Render the form followed by all of its child formsets as <p> HTML."""
        form_as_p = super(ClusterForm, self).as_p()
        return form_as_p + ''.join([formset.as_p() for formset in self.formsets.values()])
    def is_valid(self):
        # The parent form and every posted child formset must all validate.
        form_is_valid = super(ClusterForm, self).is_valid()
        formsets_are_valid = all(formset.is_valid() for formset in self._posted_formsets)
        return form_is_valid and formsets_are_valid
    def is_multipart(self):
        # Multipart if the base form or any child formset handles file uploads.
        return (
            super(ClusterForm, self).is_multipart()
            or any(formset.is_multipart() for formset in self.formsets.values())
        )
    @property
    def media(self):
        # Aggregate media assets from the form itself and every child formset.
        media = super(ClusterForm, self).media
        for formset in self.formsets.values():
            media = media + formset.media
        return media
    def save(self, commit=True):
        # do we have any fields that expect us to call save_m2m immediately?
        save_m2m_now = False
        exclude = self._meta.exclude
        fields = self._meta.fields
        for f in self.instance._meta.get_fields():
            if fields and f.name not in fields:
                continue
            if exclude and f.name in exclude:
                continue
            if getattr(f, '_need_commit_after_assignment', False):
                save_m2m_now = True
                break
        instance = super(ClusterForm, self).save(commit=(commit and not save_m2m_now))
        # The M2M-like fields designed for use with ClusterForm (currently
        # ParentalManyToManyField and ClusterTaggableManager) will manage their own in-memory
        # relations, and not immediately write to the database when we assign to them.
        # For these fields (identified by the _need_commit_after_assignment
        # flag), save_m2m() is a safe operation that does not affect the database and is thus
        # valid for commit=False. In the commit=True case, committing to the database happens
        # in the subsequent instance.save (so this needs to happen after save_m2m to ensure
        # we have the updated relation data in place).
        # For annoying legacy reasons we sometimes need to accommodate 'classic' M2M fields
        # (particularly taggit.TaggableManager) within ClusterForm. These fields
        # generally do require our instance to exist in the database at the point we call
        # save_m2m() - for this reason, we only proceed with the customisation described above
        # (i.e. postpone the instance.save() operation until after save_m2m) if there's a
        # _need_commit_after_assignment field on the form that demands it.
        if save_m2m_now:
            self.save_m2m()
            if commit:
                instance.save()
        for formset in self._posted_formsets:
            formset.instance = instance
            formset.save(commit=commit)
        return instance
    def has_changed(self):
        """Return True if data differs from initial."""
        # Need to recurse over nested formsets so that the form is saved if there are changes
        # to child forms but not the parent
        if self.formsets:
            for formset in self._posted_formsets:
                for form in formset.forms:
                    if form.has_changed():
                        return True
        return bool(self.changed_data)
|
"""
See http://pbpython.com/advanced-excel-workbooks.html for details on this script
"""
from __future__ import print_function
import pandas as pd
from xlsxwriter.utility import xl_rowcol_to_cell
def format_excel(writer, df_size):
    """Apply workbook formatting to the 'summary' sheet.

    df_size is the dataframe's (rows, columns) tuple, typically obtained
    from df.shape -> (20, 3).
    """
    workbook = writer.book
    worksheet = writer.sheets['summary']

    # Accounting-style currency, centred, for the two money columns.
    money_fmt = workbook.add_format({'num_format': 42, 'align': 'center'})
    worksheet.set_column('A:A', 20)
    worksheet.set_column('B:C', 15, money_fmt)

    # One extra row makes room for the totals line; one fewer column because
    # the dataframe's index is not written out. Table starts at A1.
    rows, cols = df_size
    table_end = xl_rowcol_to_cell(rows + 1, cols - 1)
    table_range = 'A1:{}'.format(table_end)

    column_spec = [
        {'header': 'account', 'total_string': 'Total'},
        {'header': 'Total Sales', 'total_function': 'sum'},
        {'header': 'Average Sales', 'total_function': 'average'},
    ]
    worksheet.add_table(table_range, {'columns': column_spec,
                                      'autofilter': False,
                                      'total_row': True,
                                      'style': 'Table Style Medium 20'})
if __name__ == "__main__":
    # Fetch the sample sales workbook straight from GitHub (needs network).
    sales_df = pd.read_excel('https://github.com/chris1610/pbpython/blob/master/data/sample-salesv3.xlsx?raw=true')
    # Total and average sales per customer name.
    sales_summary = sales_df.groupby(['name'])['ext price'].agg(['sum', 'mean'])
    # Reset the index for consistency when saving in Excel
    sales_summary.reset_index(inplace=True)
    writer = pd.ExcelWriter('sales_summary.xlsx', engine='xlsxwriter')
    sales_summary.to_excel(writer, 'summary', index=False)
    format_excel(writer, sales_summary.shape)
    # NOTE(review): ExcelWriter.save() was removed in pandas 2.0 in favour of
    # close() -- confirm the pinned pandas version before upgrading.
    writer.save()
|
"""Testing a sprite.
The ball should bounce off the sides of the window. You may resize the
window.
This test should just run without failing.
"""
__docformat__ = 'restructuredtext'
__version__ = '$Id$'
import os
import unittest
from pyglet.gl import glClear
import pyglet.window
import pyglet.window.event
from pyglet import clock
from scene2d import Sprite, Image2d, FlatView
from scene2d.image import TintEffect
from scene2d.camera import FlatCamera
ball_png = os.path.join(os.path.dirname(__file__), 'ball.png')
class BouncySprite(Sprite):
    """Sprite that bounces inside a 320x320 window using the 'dx'/'dy'
    velocity entries stored in its properties dict."""

    def update(self):
        """Move by the stored velocity, clamping to the window edges and
        reversing direction on contact."""
        props = self.properties
        self.x += props['dx']
        self.y += props['dy']
        # Horizontal walls: snap back to the edge, then flip dx.
        if self.left < 0:
            self.left = 0
            props['dx'] = -props['dx']
        elif self.right > 320:
            self.right = 320
            props['dx'] = -props['dx']
        # Vertical walls: same treatment for dy.
        if self.bottom < 0:
            self.bottom = 0
            props['dy'] = -props['dy']
        elif self.top > 320:
            self.top = 320
            props['dy'] = -props['dy']
class SpriteOverlapTest(unittest.TestCase):
    def test_sprite(self):
        """Bounce two sprites until the window closes; tint one on overlap."""
        w = pyglet.window.Window(width=320, height=320)
        image = Image2d.load(ball_png)
        ball1 = BouncySprite(0, 0, 64, 64, image, properties=dict(dx=10, dy=5))
        ball2 = BouncySprite(288, 0, 64, 64, image,
                             properties=dict(dx=-10, dy=5))
        view = FlatView(0, 0, 320, 320, sprites=[ball1, ball2])
        view.fx, view.fy = 160, 160
        clock.set_fps_limit(60)
        e = TintEffect((.5, 1, .5, 1))
        # Interactive loop: runs until the user closes the window.
        while not w.has_exit:
            clock.tick()
            w.dispatch_events()
            ball1.update()
            ball2.update()
            # Add the tint effect on first contact; the 'overlap' key in
            # ball2.properties tracks whether the effect is currently applied.
            if ball1.overlaps(ball2):
                if 'overlap' not in ball2.properties:
                    ball2.properties['overlap'] = e
                    ball2.add_effect(e)
            elif 'overlap' in ball2.properties:
                ball2.remove_effect(e)
                del ball2.properties['overlap']
            view.clear()
            view.draw()
            w.flip()
        w.close()
unittest.main()
|
import os
from subprocess import call, Popen, PIPE
import sys
from . import Command
from . import utils
class OpenSequenceInRV(Command):
    """%prog [options] [paths]
    Open the latest version for each given entity.
    """
    def run(self, sgfs, opts, args):
        # Parse them all.
        arg_to_movie = {}
        arg_to_entity = {}
        for arg in args:
            # Literal filesystem paths are used directly, skipping the lookup.
            if os.path.exists(arg):
                arg_to_movie[arg] = arg
                continue
            print 'Parsing %r...' % arg
            data = utils.parse_spec(sgfs, arg.split(), ['Shot'])
            type_ = data.get('type')
            id_ = data.get('id')
            # NOTE(review): this only rejects the arg when BOTH type and id
            # are missing -- confirm whether a spec with just one of them
            # should really proceed to the merge below.
            if not (type_ or id_):
                print 'no entities found for', repr(arg)
                return 1
            arg_to_entity.setdefault(type_, {})[arg] = sgfs.session.merge(dict(type=type_, id=id_))
        tasks = arg_to_entity.pop('Task', {})
        shots = arg_to_entity.pop('Shot', {})
        if arg_to_entity:
            print 'found entities that were not Task or Shot:', ', '.join(sorted(arg_to_entity))
            return 2
        # Resolve each Task to its parent Shot.
        if tasks:
            print 'Getting shots from tasks...'
            sgfs.session.fetch(tasks.values(), 'entity')
            for arg, task in tasks.iteritems():
                shots[arg] = task['entity']
        # Resolve each Shot to its latest version's movie (or frames) path.
        if shots:
            print 'Getting versions from shots...'
            sgfs.session.fetch(shots.values(), ('sg_latest_version.Version.sg_path_to_movie', 'sg_latest_version.Version.sg_path_to_frames'))
            for arg, shot in shots.iteritems():
                version = shot.get('sg_latest_version')
                if not version:
                    print 'no version for', shot
                    return 3
                path = version.get('sg_path_to_movie') or version.get('sg_path_to_frames')
                if not path:
                    print 'no movie or frames for', version
                    return 4
                arg_to_movie[arg] = path
        # Preserve the order the arguments were given on the command line.
        movies = [arg_to_movie[arg] for arg in args]
        print 'Opening:'
        print '\t' + '\n\t'.join(movies)
        # Take the last whitespace-separated token of rv's stderr output
        # (presumably the baked rvlink URL -- confirm against rv docs).
        rvlink = Popen(['rv', '-bakeURL'] + movies, stderr=PIPE).communicate()[1].strip().split()[-1]
        self.open(rvlink)
    def open(self, x):
        # Delegate to the platform's URL/file opener.
        if sys.platform.startswith('darwin'):
            call(['open', x])
        else:
            call(['xdg-open', x])
run = OpenSequenceInRV()
|
"""
External serialization for testing remote module loading.
"""
from tiddlyweb.serializations import SerializationInterface
class Serialization(SerializationInterface):
    """Stub serializer that prints each call's payload to stdout, letting the
    test suite confirm this externally-loaded module was actually invoked."""
    def list_recipes(self, recipes):
        print recipes
    def list_bags(self, bags):
        print bags
    def recipe_as(self, recipe):
        # 'r_as' marks serialization of a recipe.
        print "r_as: %s" % recipe
    def as_recipe(self, recipe, input):
        # 'as_r' marks deserialization into a recipe.
        print "as_r: %s" % input
    def bag_as(self, bag):
        print "b_as: %s" % bag
    def as_bag(self, bag, input):
        print "as_b: %s" % input
    def tiddler_as(self, tiddler):
        print "t_as: %s" % tiddler
    def as_tiddler(self, tiddler, input):
        print "as_t: %s" % input
|
"""
This module contains classes that help to emulate xcodebuild behavior on top of
other build systems, such as make and ninja.
"""
import copy
import gyp.common
import os
import os.path
import re
import shlex
import subprocess
import sys
import tempfile
from gyp.common import GypError
class XcodeSettings(object):
  """A class that understands the gyp 'xcode_settings' object."""
  # Populated lazily by _SdkPath(). Shared by all XcodeSettings, so cached
  # at class-level for efficiency.
  _sdk_path_cache = {}
  # Reverse map of _sdk_path_cache (SDK path -> SDKROOT name); filled in by
  # _XcodeSdkPath().
  _sdk_root_cache = {}
  # Populated lazily by GetExtraPlistItems(). Shared by all XcodeSettings, so
  # cached at class-level for efficiency.
  _plist_cache = {}
  # Populated lazily by GetIOSPostbuilds. Shared by all XcodeSettings, so
  # cached at class-level for efficiency.
  _codesigning_key_cache = {}
  # Populated lazily by _XcodeVersion. Shared by all XcodeSettings, so cached
  # at class-level for efficiency.
  _xcode_version_cache = ()
  def __init__(self, spec):
    # `spec` is a gyp target dict; it must contain 'configurations'.
    self.spec = spec
    self.isIOS = False
    # Per-target 'xcode_settings' are pushed down into configs earlier by gyp.
    # This means self.xcode_settings[config] always contains all settings
    # for that config -- the per-target settings as well. Settings that are
    # the same for all configs are implicitly per-target settings.
    self.xcode_settings = {}
    configs = spec['configurations']
    for configname, config in configs.iteritems():
      self.xcode_settings[configname] = config.get('xcode_settings', {})
      self._ConvertConditionalKeys(configname)
      # Presence of an iOS deployment target in any config marks the whole
      # target as iOS.
      if self.xcode_settings[configname].get('IPHONEOS_DEPLOYMENT_TARGET',
                                             None):
        self.isIOS = True
    # This is only non-None temporarily during the execution of some methods.
    self.configname = None
    # Used by _AdjustLibrary to match .a and .dylib entries in libraries.
    self.library_re = re.compile(r'^lib([^/]+)\.(a|dylib)$')
  def _ConvertConditionalKeys(self, configname):
    """Converts or warns on conditional keys. Xcode supports conditional keys,
    such as CODE_SIGN_IDENTITY[sdk=iphoneos*]. This is a partial implementation
    with some keys converted while the rest force a warning."""
    settings = self.xcode_settings[configname]
    conditional_keys = [key for key in settings if key.endswith(']')]
    for key in conditional_keys:
      # If you need more, speak up at http://crbug.com/122592
      if key.endswith("[sdk=iphoneos*]"):
        if configname.endswith("iphoneos"):
          # Promote the value under the unconditional key name.
          new_key = key.split("[")[0]
          settings[new_key] = settings[key]
      else:
        print 'Warning: Conditional keys not implemented, ignoring:', \
              ' '.join(conditional_keys)
      # The conditional form is removed whether it was converted or not.
      del settings[key]
  def _Settings(self):
    """Return the settings dict for the currently selected configname."""
    assert self.configname
    return self.xcode_settings[self.configname]
  def _Test(self, test_key, cond_key, default):
    # True iff the setting (or its default, when unset) equals cond_key.
    return self._Settings().get(test_key, default) == cond_key
  def _Appendf(self, lst, test_key, format_str, default=None):
    """Append format_str %% value to lst if test_key (or a truthy default)
    is available. Note a falsy default (e.g. '') appends nothing."""
    if test_key in self._Settings():
      lst.append(format_str % str(self._Settings()[test_key]))
    elif default:
      lst.append(format_str % str(default))
  def _WarnUnimplemented(self, test_key):
    # Emit a warning when a setting is present that this emulation ignores.
    if test_key in self._Settings():
      print 'Warning: Ignoring not yet implemented key "%s".' % test_key
  def _IsBundle(self):
    # 'mac_bundle' is an integer-ish flag in the target spec; default 0.
    return int(self.spec.get('mac_bundle', 0)) != 0
  def GetFrameworkVersion(self):
    """Returns the framework version of the current target. Only valid for
    bundles."""
    assert self._IsBundle()
    return self.GetPerTargetSetting('FRAMEWORK_VERSION', default='A')
  def GetWrapperExtension(self):
    """Returns the bundle extension (.app, .framework, .plugin, etc). Only
    valid for bundles."""
    assert self._IsBundle()
    if self.spec['type'] in ('loadable_module', 'shared_library'):
      default_wrapper_extension = {
        'loadable_module': 'bundle',
        'shared_library': 'framework',
      }[self.spec['type']]
      # WRAPPER_EXTENSION (xcode_settings) overrides the type default;
      # 'product_extension' (target spec) overrides both.
      wrapper_extension = self.GetPerTargetSetting(
          'WRAPPER_EXTENSION', default=default_wrapper_extension)
      return '.' + self.spec.get('product_extension', wrapper_extension)
    elif self.spec['type'] == 'executable':
      return '.' + self.spec.get('product_extension', 'app')
    else:
      assert False, "Don't know extension for '%s', target '%s'" % (
          self.spec['type'], self.spec['target_name'])
  def GetProductName(self):
    """Returns PRODUCT_NAME."""
    return self.spec.get('product_name', self.spec['target_name'])
  def GetFullProductName(self):
    """Returns FULL_PRODUCT_NAME."""
    if self._IsBundle():
      return self.GetWrapperName()
    else:
      return self._GetStandaloneBinaryPath()
  def GetWrapperName(self):
    """Returns the directory name of the bundle represented by this target.
    Only valid for bundles."""
    assert self._IsBundle()
    return self.GetProductName() + self.GetWrapperExtension()
  def GetBundleContentsFolderPath(self):
    """Returns the qualified path to the bundle's contents folder. E.g.
    Chromium.app/Contents or Foo.bundle/Versions/A. Only valid for bundles."""
    if self.isIOS:
      # iOS bundles are flat: contents live directly in the wrapper.
      # Note this early-out happens before the _IsBundle() assert below.
      return self.GetWrapperName()
    assert self._IsBundle()
    if self.spec['type'] == 'shared_library':
      return os.path.join(
          self.GetWrapperName(), 'Versions', self.GetFrameworkVersion())
    else:
      # loadable_modules have a 'Contents' folder like executables.
      return os.path.join(self.GetWrapperName(), 'Contents')
  def GetBundleResourceFolder(self):
    """Returns the qualified path to the bundle's resource folder. E.g.
    Chromium.app/Contents/Resources. Only valid for bundles."""
    assert self._IsBundle()
    if self.isIOS:
      # iOS bundles keep resources at the top level of the bundle.
      return self.GetBundleContentsFolderPath()
    return os.path.join(self.GetBundleContentsFolderPath(), 'Resources')
  def GetBundlePlistPath(self):
    """Returns the qualified path to the bundle's plist file. E.g.
    Chromium.app/Contents/Info.plist. Only valid for bundles."""
    assert self._IsBundle()
    if self.spec['type'] in ('executable', 'loadable_module'):
      return os.path.join(self.GetBundleContentsFolderPath(), 'Info.plist')
    else:
      # Frameworks keep Info.plist under Resources.
      return os.path.join(self.GetBundleContentsFolderPath(),
                          'Resources', 'Info.plist')
  def GetProductType(self):
    """Returns the PRODUCT_TYPE of this target."""
    # A KeyError here means an unsupported gyp target type.
    if self._IsBundle():
      return {
        'executable': 'com.apple.product-type.application',
        'loadable_module': 'com.apple.product-type.bundle',
        'shared_library': 'com.apple.product-type.framework',
      }[self.spec['type']]
    else:
      return {
        'executable': 'com.apple.product-type.tool',
        'loadable_module': 'com.apple.product-type.library.dynamic',
        'shared_library': 'com.apple.product-type.library.dynamic',
        'static_library': 'com.apple.product-type.library.static',
      }[self.spec['type']]
  def GetMachOType(self):
    """Returns the MACH_O_TYPE of this target."""
    # Weird, but matches Xcode.
    if not self._IsBundle() and self.spec['type'] == 'executable':
      return ''
    return {
      'executable': 'mh_execute',
      'static_library': 'staticlib',
      'shared_library': 'mh_dylib',
      'loadable_module': 'mh_bundle',
    }[self.spec['type']]
def _GetBundleBinaryPath(self):
"""Returns the name of the bundle binary of by this target.
E.g. Chromium.app/Contents/MacOS/Chromium. Only valid for bundles."""
assert self._IsBundle()
if self.spec['type'] in ('shared_library') or self.isIOS:
path = self.GetBundleContentsFolderPath()
elif self.spec['type'] in ('executable', 'loadable_module'):
path = os.path.join(self.GetBundleContentsFolderPath(), 'MacOS')
return os.path.join(path, self.GetExecutableName())
  def _GetStandaloneExecutableSuffix(self):
    """Return the file extension for a non-bundle binary of this type."""
    # An explicit 'product_extension' in the spec wins over the type default.
    if 'product_extension' in self.spec:
      return '.' + self.spec['product_extension']
    return {
      'executable': '',
      'static_library': '.a',
      'shared_library': '.dylib',
      'loadable_module': '.so',
    }[self.spec['type']]
  def _GetStandaloneExecutablePrefix(self):
    """Return the filename prefix for a non-bundle binary of this type."""
    return self.spec.get('product_prefix', {
      'executable': '',
      'static_library': 'lib',
      'shared_library': 'lib',
      # Non-bundled loadable_modules are called foo.so for some reason
      # (that is, .so and no prefix) with the xcode build -- match that.
      'loadable_module': '',
    }[self.spec['type']])
def _GetStandaloneBinaryPath(self):
"""Returns the name of the non-bundle binary represented by this target.
E.g. hello_world. Only valid for non-bundles."""
assert not self._IsBundle()
assert self.spec['type'] in (
'executable', 'shared_library', 'static_library', 'loadable_module'), (
'Unexpected type %s' % self.spec['type'])
target = self.spec['target_name']
if self.spec['type'] == 'static_library':
if target[:3] == 'lib':
target = target[3:]
elif self.spec['type'] in ('loadable_module', 'shared_library'):
if target[:3] == 'lib':
target = target[3:]
target_prefix = self._GetStandaloneExecutablePrefix()
target = self.spec.get('product_name', target)
target_ext = self._GetStandaloneExecutableSuffix()
return target_prefix + target + target_ext
  def GetExecutableName(self):
    """Returns the executable name of the bundle represented by this target.
    E.g. Chromium."""
    if self._IsBundle():
      # For bundles the binary name is just the product name, no path/suffix.
      return self.spec.get('product_name', self.spec['target_name'])
    else:
      return self._GetStandaloneBinaryPath()
  def GetExecutablePath(self):
    """Returns the directory name of the bundle represented by this target. E.g.
    Chromium.app/Contents/MacOS/Chromium."""
    if self._IsBundle():
      return self._GetBundleBinaryPath()
    else:
      return self._GetStandaloneBinaryPath()
  def GetActiveArchs(self, configname):
    """Returns the architectures this target should be built for."""
    # TODO: Look at VALID_ARCHS, ONLY_ACTIVE_ARCH; possibly set
    # CURRENT_ARCH / NATIVE_ARCH env vars?
    return self.xcode_settings[configname].get('ARCHS', [self._DefaultArch()])
  def _GetStdout(self, cmdlist):
    """Run cmdlist and return its stdout with the trailing newline stripped.
    Raises GypError (after echoing output to stderr) on a non-zero exit."""
    job = subprocess.Popen(cmdlist, stdout=subprocess.PIPE)
    out = job.communicate()[0]
    if job.returncode != 0:
      sys.stderr.write(out + '\n')
      raise GypError('Error %d running %s' % (job.returncode, cmdlist[0]))
    return out.rstrip('\n')
  def _GetSdkVersionInfoItem(self, sdk, infoitem):
    # e.g. `xcodebuild -version -sdk macosx10.9 Path`
    return self._GetStdout(['xcodebuild', '-version', '-sdk', sdk, infoitem])
  def _SdkRoot(self, configname):
    """Return the SDKROOT setting for configname (or the active config)."""
    if configname is None:
      configname = self.configname
    return self.GetPerConfigSetting('SDKROOT', configname, default='')
  def _SdkPath(self, configname=None):
    """Return the filesystem path of the SDK selected by SDKROOT."""
    sdk_root = self._SdkRoot(configname)
    if sdk_root.startswith('/'):
      # SDKROOT is already an absolute path; use it verbatim.
      return sdk_root
    return self._XcodeSdkPath(sdk_root)
  def _XcodeSdkPath(self, sdk_root):
    """Resolve an SDK name (e.g. 'macosx10.9') to a path via xcodebuild,
    memoised in the class-level caches shared by all instances."""
    if sdk_root not in XcodeSettings._sdk_path_cache:
      sdk_path = self._GetSdkVersionInfoItem(sdk_root, 'Path')
      XcodeSettings._sdk_path_cache[sdk_root] = sdk_path
      if sdk_root:
        # Also record the reverse mapping (path -> SDKROOT name).
        XcodeSettings._sdk_root_cache[sdk_path] = sdk_root
    return XcodeSettings._sdk_path_cache[sdk_root]
  def _AppendPlatformVersionMinFlags(self, lst):
    """Append the -m*-version-min flags implied by the deployment targets."""
    self._Appendf(lst, 'MACOSX_DEPLOYMENT_TARGET', '-mmacosx-version-min=%s')
    if 'IPHONEOS_DEPLOYMENT_TARGET' in self._Settings():
      # TODO: Implement this better?
      # The SDK basename distinguishes simulator builds from device builds.
      sdk_path_basename = os.path.basename(self._SdkPath())
      if sdk_path_basename.lower().startswith('iphonesimulator'):
        self._Appendf(lst, 'IPHONEOS_DEPLOYMENT_TARGET',
                      '-mios-simulator-version-min=%s')
      else:
        self._Appendf(lst, 'IPHONEOS_DEPLOYMENT_TARGET',
                      '-miphoneos-version-min=%s')
  def GetCflags(self, configname, arch=None):
    """Returns flags that need to be added to .c, .cc, .m, and .mm
    compilations.

    Args:
      configname: The name of the configuration to get cflags for.
      arch: If given, overrides the ARCHS setting with this single arch.
    """
    # These functions (and the similar ones below) do not offer complete
    # emulation of all xcode_settings keys. They're implemented on demand.
    self.configname = configname
    cflags = []
    sdk_root = self._SdkPath()
    if 'SDKROOT' in self._Settings():
      cflags.append('-isysroot %s' % sdk_root)
    if self._Test('CLANG_WARN_CONSTANT_CONVERSION', 'YES', default='NO'):
      cflags.append('-Wconstant-conversion')
    if self._Test('GCC_CHAR_IS_UNSIGNED_CHAR', 'YES', default='NO'):
      cflags.append('-funsigned-char')
    if self._Test('GCC_CW_ASM_SYNTAX', 'YES', default='YES'):
      cflags.append('-fasm-blocks')
    if 'GCC_DYNAMIC_NO_PIC' in self._Settings():
      if self._Settings()['GCC_DYNAMIC_NO_PIC'] == 'YES':
        cflags.append('-mdynamic-no-pic')
    else:
      pass
      # TODO: In this case, it depends on the target. xcode passes
      # mdynamic-no-pic by default for executable and possibly static lib
      # according to mento
    if self._Test('GCC_ENABLE_PASCAL_STRINGS', 'YES', default='YES'):
      cflags.append('-mpascal-strings')
    # Optimization level defaults to -Os, matching Xcode's Release default.
    self._Appendf(cflags, 'GCC_OPTIMIZATION_LEVEL', '-O%s', default='s')
    if self._Test('GCC_GENERATE_DEBUGGING_SYMBOLS', 'YES', default='YES'):
      dbg_format = self._Settings().get('DEBUG_INFORMATION_FORMAT', 'dwarf')
      if dbg_format == 'dwarf':
        cflags.append('-gdwarf-2')
      elif dbg_format == 'stabs':
        raise NotImplementedError('stabs debug format is not supported yet.')
      elif dbg_format == 'dwarf-with-dsym':
        # The dSYM itself is produced later, by a postbuild.
        cflags.append('-gdwarf-2')
      else:
        raise NotImplementedError('Unknown debug format %s' % dbg_format)
    if self._Settings().get('GCC_STRICT_ALIASING') == 'YES':
      cflags.append('-fstrict-aliasing')
    elif self._Settings().get('GCC_STRICT_ALIASING') == 'NO':
      cflags.append('-fno-strict-aliasing')
    if self._Test('GCC_SYMBOLS_PRIVATE_EXTERN', 'YES', default='NO'):
      cflags.append('-fvisibility=hidden')
    if self._Test('GCC_TREAT_WARNINGS_AS_ERRORS', 'YES', default='NO'):
      cflags.append('-Werror')
    if self._Test('GCC_WARN_ABOUT_MISSING_NEWLINE', 'YES', default='NO'):
      cflags.append('-Wnewline-eof')
    self._AppendPlatformVersionMinFlags(cflags)
    # TODO:
    if self._Test('COPY_PHASE_STRIP', 'YES', default='NO'):
      self._WarnUnimplemented('COPY_PHASE_STRIP')
    self._WarnUnimplemented('GCC_DEBUGGING_SYMBOLS')
    self._WarnUnimplemented('GCC_ENABLE_OBJC_EXCEPTIONS')
    # TODO: This is exported correctly, but assigning to it is not supported.
    self._WarnUnimplemented('MACH_O_TYPE')
    self._WarnUnimplemented('PRODUCT_TYPE')
    if arch is not None:
      archs = [arch]
    else:
      archs = self._Settings().get('ARCHS', [self._DefaultArch()])
    if len(archs) != 1:
      # TODO: Supporting fat binaries will be annoying.
      self._WarnUnimplemented('ARCHS')
      archs = ['i386']
    cflags.append('-arch ' + archs[0])
    # SSE extension flags only make sense for Intel architectures.
    if archs[0] in ('i386', 'x86_64'):
      if self._Test('GCC_ENABLE_SSE3_EXTENSIONS', 'YES', default='NO'):
        cflags.append('-msse3')
      if self._Test('GCC_ENABLE_SUPPLEMENTAL_SSE3_INSTRUCTIONS', 'YES',
                    default='NO'):
        cflags.append('-mssse3')  # Note 3rd 's'.
      if self._Test('GCC_ENABLE_SSE41_EXTENSIONS', 'YES', default='NO'):
        cflags.append('-msse4.1')
      if self._Test('GCC_ENABLE_SSE42_EXTENSIONS', 'YES', default='NO'):
        cflags.append('-msse4.2')
    cflags += self._Settings().get('WARNING_CFLAGS', [])
    config = self.spec['configurations'][self.configname]
    framework_dirs = config.get('mac_framework_dirs', [])
    for directory in framework_dirs:
      cflags.append('-F' + directory.replace('$(SDKROOT)', sdk_root))
    self.configname = None
    return cflags
def GetCflagsC(self, configname):
"""Returns flags that need to be added to .c, and .m compilations."""
self.configname = configname
cflags_c = []
if self._Settings().get('GCC_C_LANGUAGE_STANDARD', '') == 'ansi':
cflags_c.append('-ansi')
else:
self._Appendf(cflags_c, 'GCC_C_LANGUAGE_STANDARD', '-std=%s')
cflags_c += self._Settings().get('OTHER_CFLAGS', [])
self.configname = None
return cflags_c
  def GetCflagsCC(self, configname):
    """Returns flags that need to be added to .cc, and .mm compilations."""
    self.configname = configname
    cflags_cc = []
    clang_cxx_language_standard = self._Settings().get(
        'CLANG_CXX_LANGUAGE_STANDARD')
    # Note: Don't make c++0x to c++11 so that c++0x can be used with older
    # clangs that don't understand c++11 yet (like Xcode 4.2's).
    if clang_cxx_language_standard:
      cflags_cc.append('-std=%s' % clang_cxx_language_standard)
    self._Appendf(cflags_cc, 'CLANG_CXX_LIBRARY', '-stdlib=%s')
    if self._Test('GCC_ENABLE_CPP_RTTI', 'NO', default='YES'):
      cflags_cc.append('-fno-rtti')
    if self._Test('GCC_ENABLE_CPP_EXCEPTIONS', 'NO', default='YES'):
      cflags_cc.append('-fno-exceptions')
    if self._Test('GCC_INLINES_ARE_PRIVATE_EXTERN', 'YES', default='NO'):
      cflags_cc.append('-fvisibility-inlines-hidden')
    if self._Test('GCC_THREADSAFE_STATICS', 'NO', default='YES'):
      cflags_cc.append('-fno-threadsafe-statics')
    # Note: This flag is a no-op for clang, it only has an effect for gcc.
    if self._Test('GCC_WARN_ABOUT_INVALID_OFFSETOF_MACRO', 'NO', default='YES'):
      cflags_cc.append('-Wno-invalid-offsetof')
    # OTHER_CPLUSPLUSFLAGS defaults to $(inherited), which expands to
    # OTHER_CFLAGS, so the C flags are inherited unless overridden.
    other_ccflags = []
    for flag in self._Settings().get('OTHER_CPLUSPLUSFLAGS', ['$(inherited)']):
      # TODO: More general variable expansion. Missing in many other places too.
      if flag in ('$inherited', '$(inherited)', '${inherited}'):
        flag = '$OTHER_CFLAGS'
      if flag in ('$OTHER_CFLAGS', '$(OTHER_CFLAGS)', '${OTHER_CFLAGS}'):
        other_ccflags += self._Settings().get('OTHER_CFLAGS', [])
      else:
        other_ccflags.append(flag)
    cflags_cc += other_ccflags
    self.configname = None
    return cflags_cc
def _AddObjectiveCGarbageCollectionFlags(self, flags):
gc_policy = self._Settings().get('GCC_ENABLE_OBJC_GC', 'unsupported')
if gc_policy == 'supported':
flags.append('-fobjc-gc')
elif gc_policy == 'required':
flags.append('-fobjc-gc-only')
def _AddObjectiveCARCFlags(self, flags):
if self._Test('CLANG_ENABLE_OBJC_ARC', 'YES', default='NO'):
flags.append('-fobjc-arc')
def _AddObjectiveCMissingPropertySynthesisFlags(self, flags):
if self._Test('CLANG_WARN_OBJC_MISSING_PROPERTY_SYNTHESIS',
'YES', default='NO'):
flags.append('-Wobjc-missing-property-synthesis')
def GetCflagsObjC(self, configname):
"""Returns flags that need to be added to .m compilations."""
self.configname = configname
cflags_objc = []
self._AddObjectiveCGarbageCollectionFlags(cflags_objc)
self._AddObjectiveCARCFlags(cflags_objc)
self._AddObjectiveCMissingPropertySynthesisFlags(cflags_objc)
self.configname = None
return cflags_objc
def GetCflagsObjCC(self, configname):
"""Returns flags that need to be added to .mm compilations."""
self.configname = configname
cflags_objcc = []
self._AddObjectiveCGarbageCollectionFlags(cflags_objcc)
self._AddObjectiveCARCFlags(cflags_objcc)
self._AddObjectiveCMissingPropertySynthesisFlags(cflags_objcc)
if self._Test('GCC_OBJC_CALL_CXX_CDTORS', 'YES', default='NO'):
cflags_objcc.append('-fobjc-call-cxx-cdtors')
self.configname = None
return cflags_objcc
def GetInstallNameBase(self):
"""Return DYLIB_INSTALL_NAME_BASE for this target."""
# Xcode sets this for shared_libraries, and for nonbundled loadable_modules.
if (self.spec['type'] != 'shared_library' and
(self.spec['type'] != 'loadable_module' or self._IsBundle())):
return None
install_base = self.GetPerTargetSetting(
'DYLIB_INSTALL_NAME_BASE',
default='/Library/Frameworks' if self._IsBundle() else '/usr/local/lib')
return install_base
def _StandardizePath(self, path):
"""Do :standardizepath processing for path."""
# I'm not quite sure what :standardizepath does. Just call normpath(),
# but don't let @executable_path/../foo collapse to foo.
if '/' in path:
prefix, rest = '', path
if path.startswith('@'):
prefix, rest = path.split('/', 1)
rest = os.path.normpath(rest) # :standardizepath
path = os.path.join(prefix, rest)
return path
  def GetInstallName(self):
    """Return LD_DYLIB_INSTALL_NAME for this target, or None for target
    types that don't have an install name."""
    # Xcode sets this for shared_libraries, and for nonbundled loadable_modules.
    if (self.spec['type'] != 'shared_library' and
        (self.spec['type'] != 'loadable_module' or self._IsBundle())):
      return None
    default_install_name = \
        '$(DYLIB_INSTALL_NAME_BASE:standardizepath)/$(EXECUTABLE_PATH)'
    install_name = self.GetPerTargetSetting(
        'LD_DYLIB_INSTALL_NAME', default=default_install_name)
    # Hardcode support for the variables used in chromium for now, to
    # unblock people using the make build.
    if '$' in install_name:
      # Only the two known variable combinations are expanded; anything
      # else is rejected outright.
      assert install_name in ('$(DYLIB_INSTALL_NAME_BASE:standardizepath)/'
          '$(WRAPPER_NAME)/$(PRODUCT_NAME)', default_install_name), (
          'Variables in LD_DYLIB_INSTALL_NAME are not generally supported '
          'yet in target \'%s\' (got \'%s\')' %
              (self.spec['target_name'], install_name))
      install_name = install_name.replace(
          '$(DYLIB_INSTALL_NAME_BASE:standardizepath)',
          self._StandardizePath(self.GetInstallNameBase()))
      if self._IsBundle():
        # These are only valid for bundles, hence the |if|.
        install_name = install_name.replace(
            '$(WRAPPER_NAME)', self.GetWrapperName())
        install_name = install_name.replace(
            '$(PRODUCT_NAME)', self.GetProductName())
      else:
        assert '$(WRAPPER_NAME)' not in install_name
        assert '$(PRODUCT_NAME)' not in install_name
      install_name = install_name.replace(
          '$(EXECUTABLE_PATH)', self.GetExecutablePath())
    return install_name
def _MapLinkerFlagFilename(self, ldflag, gyp_to_build_path):
"""Checks if ldflag contains a filename and if so remaps it from
gyp-directory-relative to build-directory-relative."""
# This list is expanded on demand.
# They get matched as:
# -exported_symbols_list file
# -Wl,exported_symbols_list file
# -Wl,exported_symbols_list,file
LINKER_FILE = '(\S+)'
WORD = '\S+'
linker_flags = [
['-exported_symbols_list', LINKER_FILE], # Needed for NaCl.
['-unexported_symbols_list', LINKER_FILE],
['-reexported_symbols_list', LINKER_FILE],
['-sectcreate', WORD, WORD, LINKER_FILE], # Needed for remoting.
]
for flag_pattern in linker_flags:
regex = re.compile('(?:-Wl,)?' + '[ ,]'.join(flag_pattern))
m = regex.match(ldflag)
if m:
ldflag = ldflag[:m.start(1)] + gyp_to_build_path(m.group(1)) + \
ldflag[m.end(1):]
# Required for ffmpeg (no idea why they don't use LIBRARY_SEARCH_PATHS,
# TODO(thakis): Update ffmpeg.gyp):
if ldflag.startswith('-L'):
ldflag = '-L' + gyp_to_build_path(ldflag[len('-L'):])
return ldflag
  def GetLdflags(self, configname, product_dir, gyp_to_build_path, arch=None):
    """Returns flags that need to be passed to the linker.

    Args:
        configname: The name of the configuration to get ld flags for.
        product_dir: The directory where products such static and dynamic
            libraries are placed. This is added to the library search path.
        gyp_to_build_path: A function that converts paths relative to the
            current gyp file to paths relative to the build directory.
        arch: If given, overrides the ARCHS setting with this single arch.
    """
    self.configname = configname
    ldflags = []
    # The xcode build is relative to a gyp file's directory, and OTHER_LDFLAGS
    # can contain entries that depend on this. Explicitly absolutify these.
    for ldflag in self._Settings().get('OTHER_LDFLAGS', []):
      ldflags.append(self._MapLinkerFlagFilename(ldflag, gyp_to_build_path))
    if self._Test('DEAD_CODE_STRIPPING', 'YES', default='NO'):
      ldflags.append('-Wl,-dead_strip')
    if self._Test('PREBINDING', 'YES', default='NO'):
      ldflags.append('-Wl,-prebind')
    self._Appendf(
        ldflags, 'DYLIB_COMPATIBILITY_VERSION', '-compatibility_version %s')
    self._Appendf(
        ldflags, 'DYLIB_CURRENT_VERSION', '-current_version %s')
    self._AppendPlatformVersionMinFlags(ldflags)
    if 'SDKROOT' in self._Settings():
      ldflags.append('-isysroot ' + self._SdkPath())
    for library_path in self._Settings().get('LIBRARY_SEARCH_PATHS', []):
      ldflags.append('-L' + gyp_to_build_path(library_path))
    if 'ORDER_FILE' in self._Settings():
      ldflags.append('-Wl,-order_file ' +
                     '-Wl,' + gyp_to_build_path(
                                  self._Settings()['ORDER_FILE']))
    if arch is not None:
      archs = [arch]
    else:
      archs = self._Settings().get('ARCHS', [self._DefaultArch()])
    if len(archs) != 1:
      # TODO: Supporting fat binaries will be annoying.
      self._WarnUnimplemented('ARCHS')
      archs = ['i386']
    ldflags.append('-arch ' + archs[0])
    # Xcode adds the product directory by default.
    ldflags.append('-L' + product_dir)
    install_name = self.GetInstallName()
    if install_name and self.spec['type'] != 'loadable_module':
      # Escape spaces so the shell doesn't split the install name.
      ldflags.append('-install_name ' + install_name.replace(' ', r'\ '))
    for rpath in self._Settings().get('LD_RUNPATH_SEARCH_PATHS', []):
      ldflags.append('-Wl,-rpath,' + rpath)
    config = self.spec['configurations'][self.configname]
    framework_dirs = config.get('mac_framework_dirs', [])
    for directory in framework_dirs:
      ldflags.append('-F' + directory.replace('$(SDKROOT)', self._SdkPath()))
    self.configname = None
    return ldflags
def GetLibtoolflags(self, configname):
"""Returns flags that need to be passed to the static linker.
Args:
configname: The name of the configuration to get ld flags for.
"""
self.configname = configname
libtoolflags = []
for libtoolflag in self._Settings().get('OTHER_LDFLAGS', []):
libtoolflags.append(libtoolflag)
# TODO(thakis): ARCHS?
self.configname = None
return libtoolflags
def GetPerTargetSettings(self):
"""Gets a list of all the per-target settings. This will only fetch keys
whose values are the same across all configurations."""
first_pass = True
result = {}
for configname in sorted(self.xcode_settings.keys()):
if first_pass:
result = dict(self.xcode_settings[configname])
first_pass = False
else:
for key, value in self.xcode_settings[configname].iteritems():
if key not in result:
continue
elif result[key] != value:
del result[key]
return result
def GetPerConfigSetting(self, setting, configname, default=None):
if configname in self.xcode_settings:
return self.xcode_settings[configname].get(setting, default)
else:
return self.GetPerTargetSetting(setting, default)
def GetPerTargetSetting(self, setting, default=None):
"""Tries to get xcode_settings.setting from spec. Assumes that the setting
has the same value in all configurations and throws otherwise."""
is_first_pass = True
result = None
for configname in sorted(self.xcode_settings.keys()):
if is_first_pass:
result = self.xcode_settings[configname].get(setting, None)
is_first_pass = False
else:
assert result == self.xcode_settings[configname].get(setting, None), (
"Expected per-target setting for '%s', got per-config setting "
"(target %s)" % (setting, self.spec['target_name']))
if result is None:
return default
return result
  def _GetStripPostbuilds(self, configname, output_binary, quiet):
    """Returns a list of shell commands that contain the shell commands
    necessary to strip this target's binary. These should be run as postbuilds
    before the actual postbuilds run."""
    self.configname = configname
    result = []
    if (self._Test('DEPLOYMENT_POSTPROCESSING', 'YES', default='NO') and
        self._Test('STRIP_INSTALLED_PRODUCT', 'YES', default='NO')):
      # Pick the strip style Xcode uses by default for this product type.
      default_strip_style = 'debugging'
      if self.spec['type'] == 'loadable_module' and self._IsBundle():
        default_strip_style = 'non-global'
      elif self.spec['type'] == 'executable':
        default_strip_style = 'all'
      strip_style = self._Settings().get('STRIP_STYLE', default_strip_style)
      strip_flags = {
        'all': '',
        'non-global': '-x',
        'debugging': '-S',
      }[strip_style]
      explicit_strip_flags = self._Settings().get('STRIPFLAGS', '')
      if explicit_strip_flags:
        strip_flags += ' ' + _NormalizeEnvVarReferences(explicit_strip_flags)
      if not quiet:
        result.append('echo STRIP\\(%s\\)' % self.spec['target_name'])
      result.append('strip %s %s' % (strip_flags, output_binary))
      self.configname = None
    return result
  def _GetDebugInfoPostbuilds(self, configname, output, output_binary, quiet):
    """Returns a list of shell commands that contain the shell commands
    necessary to massage this target's debug information. These should be run
    as postbuilds before the actual postbuilds run."""
    self.configname = configname
    # For static libraries, no dSYMs are created.
    result = []
    if (self._Test('GCC_GENERATE_DEBUGGING_SYMBOLS', 'YES', default='YES') and
        self._Test(
            'DEBUG_INFORMATION_FORMAT', 'dwarf-with-dsym', default='dwarf') and
        self.spec['type'] != 'static_library'):
      if not quiet:
        result.append('echo DSYMUTIL\\(%s\\)' % self.spec['target_name'])
      result.append('dsymutil %s -o %s' % (output_binary, output + '.dSYM'))
    self.configname = None
    return result
def _GetTargetPostbuilds(self, configname, output, output_binary,
quiet=False):
"""Returns a list of shell commands that contain the shell commands
to run as postbuilds for this target, before the actual postbuilds."""
# dSYMs need to build before stripping happens.
return (
self._GetDebugInfoPostbuilds(configname, output, output_binary, quiet) +
self._GetStripPostbuilds(configname, output_binary, quiet))
def _GetIOSPostbuilds(self, configname, output_binary):
"""Return a shell command to codesign the iOS output binary so it can
be deployed to a device. This should be run as the very last step of the
build."""
if not (self.isIOS and self.spec['type'] == "executable"):
return []
settings = self.xcode_settings[configname]
key = self._GetIOSCodeSignIdentityKey(settings)
if not key:
return []
# Warn for any unimplemented signing xcode keys.
unimpl = ['OTHER_CODE_SIGN_FLAGS']
unimpl = set(unimpl) & set(self.xcode_settings[configname].keys())
if unimpl:
print 'Warning: Some codesign keys not implemented, ignoring: %s' % (
', '.join(sorted(unimpl)))
return ['%s code-sign-bundle "%s" "%s" "%s" "%s"' % (
os.path.join('${TARGET_BUILD_DIR}', 'gyp-mac-tool'), key,
settings.get('CODE_SIGN_RESOURCE_RULES_PATH', ''),
settings.get('CODE_SIGN_ENTITLEMENTS', ''),
settings.get('PROVISIONING_PROFILE', ''))
]
  def _GetIOSCodeSignIdentityKey(self, settings):
    """Resolves CODE_SIGN_IDENTITY from |settings| to a keychain fingerprint
    using the 'security' tool, caching hits at class level.

    Returns:
      The fingerprint string; '' if the identity was not found in the
      keychain output; None if no identity is configured at all.
    """
    identity = settings.get('CODE_SIGN_IDENTITY')
    if not identity:
      return None
    if identity not in XcodeSettings._codesigning_key_cache:
      output = subprocess.check_output(
          ['security', 'find-identity', '-p', 'codesigning', '-v'])
      for line in output.splitlines():
        if identity in line:
          fingerprint = line.split()[1]
          cache = XcodeSettings._codesigning_key_cache
          # One identity name must not resolve to two different fingerprints.
          assert identity not in cache or fingerprint == cache[identity], (
              "Multiple codesigning fingerprints for identity: %s" % identity)
          XcodeSettings._codesigning_key_cache[identity] = fingerprint
    return XcodeSettings._codesigning_key_cache.get(identity, '')
def AddImplicitPostbuilds(self, configname, output, output_binary,
postbuilds=[], quiet=False):
"""Returns a list of shell commands that should run before and after
|postbuilds|."""
assert output_binary is not None
pre = self._GetTargetPostbuilds(configname, output, output_binary, quiet)
post = self._GetIOSPostbuilds(configname, output_binary)
return pre + postbuilds + post
def _AdjustLibrary(self, library, config_name=None):
if library.endswith('.framework'):
l = '-framework ' + os.path.splitext(os.path.basename(library))[0]
else:
m = self.library_re.match(library)
if m:
l = '-l' + m.group(1)
else:
l = library
return l.replace('$(SDKROOT)', self._SdkPath(config_name))
def AdjustLibraries(self, libraries, config_name=None):
"""Transforms entries like 'Cocoa.framework' in libraries into entries like
'-framework Cocoa', 'libcrypto.dylib' into '-lcrypto', etc.
"""
libraries = [self._AdjustLibrary(library, config_name)
for library in libraries]
return libraries
def _BuildMachineOSBuild(self):
return self._GetStdout(['sw_vers', '-buildVersion'])
  def _XcodeVersion(self):
    """Returns a (version, build) tuple, e.g. ('0463', '4H1503'), parsed from
    'xcodebuild -version' and cached at class level."""
    # `xcodebuild -version` output looks like
    #    Xcode 4.6.3
    #    Build version 4H1503
    # or like
    #    Xcode 3.2.6
    #    Component versions: DevToolsCore-1809.0; DevToolsSupport-1806.0
    #    BuildVersion: 10M2518
    # Convert that to '0463', '4H1503'.
    if len(XcodeSettings._xcode_version_cache) == 0:
      version_list = self._GetStdout(['xcodebuild', '-version']).splitlines()
      # The version is on the first line, the build on the last.
      version = version_list[0]
      build = version_list[-1]
      # Be careful to convert "4.2" to "0420":
      version = version.split()[-1].replace('.', '')
      version = (version + '0' * (3 - len(version))).zfill(4)
      build = build.split()[-1]
      XcodeSettings._xcode_version_cache = (version, build)
    return XcodeSettings._xcode_version_cache
def _XcodeIOSDeviceFamily(self, configname):
family = self.xcode_settings[configname].get('TARGETED_DEVICE_FAMILY', '1')
return [int(x) for x in family.split(',')]
  def GetExtraPlistItems(self, configname=None):
    """Returns a dictionary with extra items to insert into Info.plist."""
    # Per-configuration results are cached at class level, since they
    # require shelling out to xcodebuild / sw_vers.
    if configname not in XcodeSettings._plist_cache:
      cache = {}
      cache['BuildMachineOSBuild'] = self._BuildMachineOSBuild()
      xcode, xcode_build = self._XcodeVersion()
      cache['DTXcode'] = xcode
      cache['DTXcodeBuild'] = xcode_build
      sdk_root = self._SdkRoot(configname)
      if not sdk_root:
        sdk_root = self._DefaultSdkRoot()
      cache['DTSDKName'] = sdk_root
      if xcode >= '0430':
        cache['DTSDKBuild'] = self._GetSdkVersionInfoItem(
            sdk_root, 'ProductBuildVersion')
      else:
        cache['DTSDKBuild'] = cache['BuildMachineOSBuild']
      if self.isIOS:
        cache['DTPlatformName'] = cache['DTSDKName']
        if configname.endswith("iphoneos"):
          cache['DTPlatformVersion'] = self._GetSdkVersionInfoItem(
              sdk_root, 'ProductVersion')
          cache['CFBundleSupportedPlatforms'] = ['iPhoneOS']
        else:
          cache['CFBundleSupportedPlatforms'] = ['iPhoneSimulator']
      XcodeSettings._plist_cache[configname] = cache
    # Include extra plist items that are per-target, not per global
    # XcodeSettings.
    items = dict(XcodeSettings._plist_cache[configname])
    if self.isIOS:
      items['UIDeviceFamily'] = self._XcodeIOSDeviceFamily(configname)
    return items
def _DefaultSdkRoot(self):
"""Returns the default SDKROOT to use.
Prior to version 5.0.0, if SDKROOT was not explicitly set in the Xcode
project, then the environment variable was empty. Starting with this
version, Xcode uses the name of the newest SDK installed.
"""
if self._XcodeVersion() < '0500':
return ''
default_sdk_path = self._XcodeSdkPath('')
default_sdk_root = XcodeSettings._sdk_root_cache.get(default_sdk_path)
if default_sdk_root:
return default_sdk_root
all_sdks = self._GetStdout(['xcodebuild', '-showsdks'])
for line in all_sdks.splitlines():
items = line.split()
if len(items) >= 3 and items[-2] == '-sdk':
sdk_root = items[-1]
sdk_path = self._XcodeSdkPath(sdk_root)
if sdk_path == default_sdk_path:
return sdk_root
return ''
def _DefaultArch(self):
# For Mac projects, Xcode changed the default value used when ARCHS is not
# set from "i386" to "x86_64".
#
# For iOS projects, if ARCHS is unset, it defaults to "armv7 armv7s" when
# building for a device, and the simulator binaries are always build for
# "i386".
#
# For new projects, ARCHS is set to $(ARCHS_STANDARD_INCLUDING_64_BIT),
# which correspond to "armv7 armv7s arm64", and when building the simulator
# the architecture is either "i386" or "x86_64" depending on the simulated
# device (respectively 32-bit or 64-bit device).
#
# Since the value returned by this function is only used when ARCHS is not
# set, then on iOS we return "i386", as the default xcode project generator
# does not set ARCHS if it is not set in the .gyp file.
if self.isIOS:
return 'i386'
version, build = self._XcodeVersion()
if version >= '0500':
return 'x86_64'
return 'i386'
class MacPrefixHeader(object):
  """Emulates Xcode's GCC_PREFIX_HEADER feature.

  The feature has several parts:
  * With GCC_PREFIX_HEADER set, every compilation in the project gets an
    extra |-include path_to_prefix_header| cflag.
  * With GCC_PRECOMPILE_PREFIX_HEADER set as well, the prefix header is
    compiled first, and every other compilation gets
    |-include path_to_compiled_header| instead.
    + A compiled prefix header has the extension gch; there is one per
      language used in the project (c, cc, m, mm), because gch files of
      different languages are incompatible.
    + gch files are built with the target's normal cflags, minus the
      |-include| flag, plus a -x flag naming their language.
    + Every o file in the target must depend on its gch file so the gch is
      built before any o file.
  This class helps with some of these tasks; the build system still has to
  write the gch dependencies and build commands and decide where the gch
  files live.
  """
  def __init__(self, xcode_settings,
               gyp_path_to_build_path, gyp_path_to_build_output):
    """If xcode_settings is None, all methods on this class are no-ops.

    Args:
        gyp_path_to_build_path: A function that takes a gyp-relative path,
            and returns a path relative to the build directory.
        gyp_path_to_build_output: A function that takes a gyp-relative path and
            a language code ('c', 'cc', 'm', or 'mm'), and that returns a path
            to where the output of precompiling that path for that language
            should be placed (without the trailing '.gch').
    """
    # Per-configuration prefix headers are not supported; good enough so far.
    self.header = None
    self.compile_headers = False
    if xcode_settings:
      self.header = xcode_settings.GetPerTargetSetting('GCC_PREFIX_HEADER')
      self.compile_headers = xcode_settings.GetPerTargetSetting(
          'GCC_PRECOMPILE_PREFIX_HEADER', default='NO') != 'NO'
    self.compiled_headers = {}
    if self.header:
      if self.compile_headers:
        for lang in ('c', 'cc', 'm', 'mm'):
          self.compiled_headers[lang] = gyp_path_to_build_output(
              self.header, lang)
      self.header = gyp_path_to_build_path(self.header)
  def _CompiledHeader(self, lang, arch):
    """Returns the compiled-header base path for |lang| (and |arch|)."""
    assert self.compile_headers
    compiled = self.compiled_headers[lang]
    if arch:
      compiled += '.' + arch
    return compiled
  def GetInclude(self, lang, arch=None):
    """Gets the cflags to include the prefix header for language |lang|."""
    if self.compile_headers and lang in self.compiled_headers:
      return '-include %s' % self._CompiledHeader(lang, arch)
    if self.header:
      return '-include %s' % self.header
    return ''
  def _Gch(self, lang, arch):
    """Returns the actual file name of the prefix header for language |lang|."""
    assert self.compile_headers
    return '%s.gch' % self._CompiledHeader(lang, arch)
  def GetObjDependencies(self, sources, objs, arch=None):
    """Given a list of source files and the corresponding object files, returns
    a list of (source, object, gch) tuples, where |gch| is the build-directory
    relative path to the gch file each object file depends on. |sources[i]|
    has to be the source file belonging to |objs[i]|."""
    if not self.header or not self.compile_headers:
      return []
    ext_to_lang = {
        '.c': 'c',
        '.cpp': 'cc', '.cc': 'cc', '.cxx': 'cc',
        '.m': 'm',
        '.mm': 'mm',
    }
    result = []
    for source, obj in zip(sources, objs):
      lang = ext_to_lang.get(os.path.splitext(source)[1])
      if lang:
        result.append((source, obj, self._Gch(lang, arch)))
    return result
  def GetPchBuildCommands(self, arch=None):
    """Returns [(path_to_gch, language_flag, language, header)].
    |path_to_gch| and |header| are relative to the build directory.
    """
    if not self.header or not self.compile_headers:
      return []
    langs = [('c', 'c-header'), ('cc', 'c++-header'),
             ('m', 'objective-c-header'), ('mm', 'objective-c++-header')]
    return [(self._Gch(lang, arch), '-x %s' % xflag, lang, self.header)
            for lang, xflag in langs]
def MergeGlobalXcodeSettingsToSpec(global_dict, spec):
  """Merges the global xcode_settings dictionary into each configuration of the
  target represented by spec. For keys that are both in the global and the local
  xcode_settings dict, the local key gets precedence.
  """
  # The xcode generator special-cases global xcode_settings, effectively
  # merging them into every local xcode_settings dict.
  global_settings = global_dict.get('xcode_settings', {})
  for config in spec['configurations'].values():
    if 'xcode_settings' not in config:
      continue
    merged = dict(global_settings)
    merged.update(config['xcode_settings'])
    config['xcode_settings'] = merged
def IsMacBundle(flavor, spec):
  """Returns whether |spec| should be treated as a bundle.

  Bundles are directories with a certain subdirectory structure, instead of
  just a single file. Bundle rules do not produce a binary but also package
  resources into that directory."""
  wants_bundle = int(spec.get('mac_bundle', 0)) != 0
  is_mac_bundle = wants_bundle and flavor == 'mac'
  if is_mac_bundle:
    # A bundle must wrap something; type 'none' has nothing to wrap.
    assert spec['type'] != 'none', (
        'mac_bundle targets cannot have type none (target "%s")' %
        spec['target_name'])
  return is_mac_bundle
def GetMacBundleResources(product_dir, xcode_settings, resources):
  """Yields (output, resource) pairs for every resource in |resources|.
  Only call this for mac bundle targets.

  Args:
      product_dir: Path to the directory containing the output bundle,
          relative to the build directory.
      xcode_settings: The XcodeSettings of the current target.
      resources: A list of bundle resources, relative to the build directory.
  """
  dest = os.path.join(product_dir,
                      xcode_settings.GetBundleResourceFolder())
  for res in resources:
    # The make generator doesn't support spaces, so forbid them everywhere
    # to keep the generators interchangeable.
    assert ' ' not in res, (
      "Spaces in resource filenames not supported (%s)" % res)
    res_dir, res_file = os.path.split(res)
    # Resources inside a .lproj directory keep that directory level in the
    # bundle, so localized resources stay localized.
    lproj = os.path.split(res_dir)[1]
    path_parts = [dest]
    if lproj.endswith('.lproj'):
      path_parts.append(lproj)
    path_parts.append(res_file)
    output = os.path.join(*path_parts)
    # Compiled XIB files are referred to by .nib.
    if output.endswith('.xib'):
      output = os.path.splitext(output)[0] + '.nib'
    # Compiled storyboard files are referred to by .storyboardc.
    if output.endswith('.storyboard'):
      output = os.path.splitext(output)[0] + '.storyboardc'
    yield output, res
def GetMacInfoPlist(product_dir, xcode_settings, gyp_path_to_build_path):
  """Returns (info_plist, dest_plist, defines, extra_env), where:
  * |info_plist| is the source plist path, relative to the
    build directory,
  * |dest_plist| is the destination plist path, relative to the
    build directory,
  * |defines| is a list of preprocessor defines (empty if the plist
    shouldn't be preprocessed),
  * |extra_env| is a dict of env variables that should be exported when
    invoking |mac_tool copy-info-plist|.
  Only call this for mac bundle targets.

  Args:
      product_dir: Path to the directory containing the output bundle,
          relative to the build directory.
      xcode_settings: The XcodeSettings of the current target.
      gyp_path_to_build_path: A function that converts paths relative to the
          current gyp file to paths relative to the build directory.
  """
  info_plist = xcode_settings.GetPerTargetSetting('INFOPLIST_FILE')
  if not info_plist:
    return None, None, [], {}
  # The make generator doesn't support it, so forbid it everywhere
  # to keep the generators more interchangeable.
  assert ' ' not in info_plist, (
    "Spaces in Info.plist filenames not supported (%s)" % info_plist)
  info_plist = gyp_path_to_build_path(info_plist)
  # If explicitly set to preprocess the plist, invoke the C preprocessor and
  # specify any defines as -D flags.
  if xcode_settings.GetPerTargetSetting(
      'INFOPLIST_PREPROCESS', default='NO') == 'YES':
    # Create an intermediate file based on the path.
    defines = shlex.split(xcode_settings.GetPerTargetSetting(
        'INFOPLIST_PREPROCESSOR_DEFINITIONS', default=''))
  else:
    defines = []
  dest_plist = os.path.join(product_dir, xcode_settings.GetBundlePlistPath())
  extra_env = xcode_settings.GetPerTargetSettings()
  return info_plist, dest_plist, defines, extra_env
def _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration,
                 additional_settings=None):
  """Return the environment variables that Xcode would set. See
  http://developer.apple.com/library/mac/#documentation/DeveloperTools/Reference/XcodeBuildSettingRef/1-Build_Setting_Reference/build_setting_ref.html#//apple_ref/doc/uid/TP40003931-CH3-SW153
  for a full list.

  Args:
    xcode_settings: An XcodeSettings object. If this is None, this function
        returns an empty dict.
    built_products_dir: Absolute path to the built products dir.
    srcroot: Absolute path to the source root.
    configuration: The build configuration name.
    additional_settings: An optional dict with more values to add to the
        result.

  Returns:
    A dict mapping env var names to values, with all $FOO / $(FOO) style
    references normalized to ${FOO}. Note: when |additional_settings| is
    given, that dict is mutated and returned.
  """
  if not xcode_settings: return {}

  # This function is considered a friend of XcodeSettings, so let it reach into
  # its implementation details.
  spec = xcode_settings.spec

  # These are filled in on a as-needed basis.
  env = {
    'BUILT_PRODUCTS_DIR' : built_products_dir,
    'CONFIGURATION' : configuration,
    'PRODUCT_NAME' : xcode_settings.GetProductName(),
    # See /Developer/Platforms/MacOSX.platform/Developer/Library/Xcode/Specifications/MacOSX\ Product\ Types.xcspec for FULL_PRODUCT_NAME
    'SRCROOT' : srcroot,
    'SOURCE_ROOT': '${SRCROOT}',
    # This is not true for static libraries, but currently the env is only
    # written for bundles:
    'TARGET_BUILD_DIR' : built_products_dir,
    'TEMP_DIR' : '${TMPDIR}',
  }
  # SDKROOT is always set so downstream tools see an explicit (possibly
  # empty) value rather than inheriting one.
  if xcode_settings.GetPerConfigSetting('SDKROOT', configuration):
    env['SDKROOT'] = xcode_settings._SdkPath(configuration)
  else:
    env['SDKROOT'] = ''

  # Linked product types get the executable-related variables.
  if spec['type'] in (
      'executable', 'static_library', 'shared_library', 'loadable_module'):
    env['EXECUTABLE_NAME'] = xcode_settings.GetExecutableName()
    env['EXECUTABLE_PATH'] = xcode_settings.GetExecutablePath()
    env['FULL_PRODUCT_NAME'] = xcode_settings.GetFullProductName()
    mach_o_type = xcode_settings.GetMachOType()
    if mach_o_type:
      env['MACH_O_TYPE'] = mach_o_type
    env['PRODUCT_TYPE'] = xcode_settings.GetProductType()
  # Bundles additionally expose their internal folder layout.
  if xcode_settings._IsBundle():
    env['CONTENTS_FOLDER_PATH'] = \
        xcode_settings.GetBundleContentsFolderPath()
    env['UNLOCALIZED_RESOURCES_FOLDER_PATH'] = \
        xcode_settings.GetBundleResourceFolder()
    env['INFOPLIST_PATH'] = xcode_settings.GetBundlePlistPath()
    env['WRAPPER_NAME'] = xcode_settings.GetWrapperName()

  install_name = xcode_settings.GetInstallName()
  if install_name:
    env['LD_DYLIB_INSTALL_NAME'] = install_name
  install_name_base = xcode_settings.GetInstallNameBase()
  if install_name_base:
    env['DYLIB_INSTALL_NAME_BASE'] = install_name_base

  if not additional_settings:
    additional_settings = {}
  else:
    # Flatten lists to strings.
    for k in additional_settings:
      if not isinstance(additional_settings[k], str):
        additional_settings[k] = ' '.join(additional_settings[k])
  # Computed values win over caller-provided ones.
  additional_settings.update(env)

  for k in additional_settings:
    additional_settings[k] = _NormalizeEnvVarReferences(additional_settings[k])

  return additional_settings
def _NormalizeEnvVarReferences(str):
"""Takes a string containing variable references in the form ${FOO}, $(FOO),
or $FOO, and returns a string with all variable references in the form ${FOO}.
"""
# $FOO -> ${FOO}
str = re.sub(r'\$([a-zA-Z_][a-zA-Z0-9_]*)', r'${\1}', str)
# $(FOO) -> ${FOO}
matches = re.findall(r'(\$\(([a-zA-Z0-9\-_]+)\))', str)
for match in matches:
to_replace, variable = match
assert '$(' not in match, '$($(FOO)) variables not supported: ' + match
str = str.replace(to_replace, '${' + variable + '}')
return str
def ExpandEnvVars(string, expansions):
  """Expands ${VARIABLES}, $(VARIABLES), and $VARIABLES in string per the
  expansions list. If the variable expands to something that references
  another variable, this variable is expanded as well if it's in env --
  until no variables present in env are left."""
  # Walk the expansion list back-to-front so later definitions may reference
  # earlier ones; for each key, rewrite all three reference spellings.
  for key, value in reversed(expansions):
    for form in ('${%s}' % key, '$(%s)' % key, '$%s' % key):
      string = string.replace(form, value)
  return string
def _TopologicallySortedEnvVarKeys(env):
  """Takes a dict |env| whose values are strings that can refer to other keys,
  for example env['foo'] = '$(bar) and $(baz)'. Returns a list L of all keys of
  env such that key2 is after key1 in L if env[key2] refers to env[key1].

  Throws an Exception in case of dependency cycles.
  """
  # Since environment variables can refer to other variables, the evaluation
  # order is important. Below is the logic to compute the dependency graph
  # and sort it.
  # Only the already-normalized ${FOO} form is recognized here.
  regex = re.compile(r'\$\{([a-zA-Z0-9\-_]+)\}')

  def GetEdges(node):
    # Use a definition of edges such that user_of_variable -> used_varible.
    # This happens to be easier in this case, since a variable's
    # definition contains all variables it references in a single string.
    # We can then reverse the result of the topological sort at the end.
    # Since: reverse(topsort(DAG)) = topsort(reverse_edges(DAG))
    matches = set([v for v in regex.findall(env[node]) if v in env])
    for dependee in matches:
      assert '${' not in dependee, 'Nested variables not supported: ' + dependee
    return matches

  try:
    # Topologically sort, and then reverse, because we used an edge definition
    # that's inverted from the expected result of this function (see comment
    # above).
    order = gyp.common.TopologicallySorted(env.keys(), GetEdges)
    order.reverse()
    return order
  # NOTE(review): Python 2-only except syntax; this module clearly targets
  # Python 2 as written.
  except gyp.common.CycleError, e:
    raise GypError(
        'Xcode environment variables are cyclically dependent: ' + str(e.nodes))
def GetSortedXcodeEnv(xcode_settings, built_products_dir, srcroot,
                      configuration, additional_settings=None):
  """Returns the Xcode environment as (key, value) pairs, ordered so that
  every variable appears after any variable it references."""
  env = _GetXcodeEnv(xcode_settings, built_products_dir, srcroot,
                     configuration, additional_settings)
  ordered_keys = _TopologicallySortedEnvVarKeys(env)
  return [(key, env[key]) for key in ordered_keys]
def GetSpecPostbuildCommands(spec, quiet=False):
  """Returns the list of postbuilds explicitly defined on |spec|, in a form
  executable by a shell."""
  commands = []
  for postbuild in spec.get('postbuilds', []):
    entry = []
    # Announce each postbuild unless asked to be quiet.
    if not quiet:
      entry.append('echo POSTBUILD\\(%s\\) %s' % (
          spec['target_name'], postbuild['postbuild_name']))
    entry.append(gyp.common.EncodePOSIXShellList(postbuild['action']))
    commands.extend(entry)
  return commands
def _HasIOSTarget(targets):
"""Returns true if any target contains the iOS specific key
IPHONEOS_DEPLOYMENT_TARGET."""
for target_dict in targets.values():
for config in target_dict['configurations'].values():
if config.get('xcode_settings', {}).get('IPHONEOS_DEPLOYMENT_TARGET'):
return True
return False
def _AddIOSDeviceConfigurations(targets):
"""Clone all targets and append -iphoneos to the name. Configure these targets
to build for iOS devices."""
for target_dict in targets.values():
for config_name in target_dict['configurations'].keys():
config = target_dict['configurations'][config_name]
new_config_name = config_name + '-iphoneos'
new_config_dict = copy.deepcopy(config)
if target_dict['toolset'] == 'target':
new_config_dict['xcode_settings']['ARCHS'] = ['armv7']
new_config_dict['xcode_settings']['SDKROOT'] = 'iphoneos'
target_dict['configurations'][new_config_name] = new_config_dict
return targets
def CloneConfigurationForDeviceAndEmulator(target_dicts):
  """If |target_dicts| contains any iOS targets, automatically create -iphoneos
  targets for iOS device builds."""
  # Non-iOS projects pass through untouched.
  if not _HasIOSTarget(target_dicts):
    return target_dicts
  return _AddIOSDeviceConfigurations(target_dicts)
|
from unittest import TestCase
from scrapy.settings import Settings
from scrapy_tracker.storage.memory import MemoryStorage
from scrapy_tracker.storage.redis import RedisStorage
from scrapy_tracker.storage.sqlalchemy import SqlAlchemyStorage
from tests import TEST_KEY, TEST_CHECKSUM, mock
class TestMemoryStorage(TestCase):
    """getset() returns the previous checksum for a key and stores the new one."""

    def setUp(self):
        self.storage = MemoryStorage(None)

    def test_getset(self):
        # First write: no prior value.
        self.assertIsNone(self.storage.getset(TEST_KEY, TEST_CHECKSUM))
        # Each later write returns what was stored before it.
        self.assertEqual(TEST_CHECKSUM,
                         self.storage.getset(TEST_KEY, 'new_checksum'))
        self.assertEqual('new_checksum',
                         self.storage.getset(TEST_KEY, TEST_CHECKSUM))
        # A brand new key again has no prior value.
        self.assertIsNone(self.storage.getset('new_key', TEST_CHECKSUM))
class TestSqlAlchemyStorage(TestCase):
    """getset() against an in-memory SQLite-backed SqlAlchemy storage."""

    def setUp(self):
        self.storage = SqlAlchemyStorage(Settings({
            'TRACKER_SQLALCHEMY_ENGINE': 'sqlite:///:memory:',
            'TRACKER_SQLALCHEMY_FLUSH_DB': True
        }))

    def test_getset(self):
        # First write: no prior value.
        self.assertIsNone(self.storage.getset(TEST_KEY, TEST_CHECKSUM))
        # Each later write returns what was stored before it.
        self.assertEqual(TEST_CHECKSUM,
                         self.storage.getset(TEST_KEY, 'new_checksum'))
        self.assertEqual('new_checksum',
                         self.storage.getset(TEST_KEY, TEST_CHECKSUM))
        # A brand new key again has no prior value.
        self.assertIsNone(self.storage.getset('new_key', TEST_CHECKSUM))
class TestRedisStorage(TestCase):
    """getset() against a RedisStorage whose Redis client is mocked out."""

    def setUp(self):
        # Patch StrictRedis so no real Redis server is needed; the mock's
        # getset() mimics Redis GETSET semantics using a plain dict.
        with mock.patch("scrapy_tracker.storage.redis.StrictRedis") as mock_redis:
            data = {}

            def getset(key, val):
                # Return the previous value (None on first write), store the new one.
                old_val = data.get(key)
                data[key] = val
                return old_val

            mock_getset = mock.MagicMock()
            mock_getset.getset.side_effect = getset
            mock_redis.return_value = mock_getset
            self.storage = RedisStorage(Settings({
                # NOTE(review): 'RADIS' looks like a typo for 'REDIS' — confirm
                # against the setting name scrapy_tracker actually reads.
                'TRACKER_RADIS_FLUSH_DB': True
            }))

    def test_getset(self):
        # First write returns None; later writes return the prior checksum.
        result = self.storage.getset(TEST_KEY, TEST_CHECKSUM)
        self.assertIsNone(result)
        found = self.storage.getset(TEST_KEY, 'new_checksum')
        self.assertEqual(TEST_CHECKSUM, found)
        found = self.storage.getset(TEST_KEY, TEST_CHECKSUM)
        self.assertEqual('new_checksum', found)
        result = self.storage.getset('new_key', TEST_CHECKSUM)
        self.assertIsNone(result)
|
import unittest
import autoconfig
import pygccxml
from pygccxml.utils import *
from pygccxml.parser import *
from pygccxml import declarations
class tester_t(unittest.TestCase):
    """Tests for declarations.call_invocation string-parsing utilities.

    Uses assertEqual/assertTrue instead of the long-deprecated failUnless
    alias (deprecated since Python 2.7, removed in Python 3.12); assertEqual
    also produces a useful diff on failure instead of a bare boolean.
    """

    def __init__(self, *args):
        unittest.TestCase.__init__(self, *args)

    def __test_split_impl(self, decl_string, name, args):
        """Assert that split() yields the expected (name, args) pair."""
        self.assertEqual((name, args), declarations.call_invocation.split(decl_string))

    def __test_split_recursive_impl(self, decl_string, control_seq):
        """Assert that split_recursive() yields the expected sequence."""
        self.assertEqual(control_seq, declarations.call_invocation.split_recursive(decl_string))

    def __test_is_call_invocation_impl(self, decl_string):
        """Assert that the string is recognized as a call invocation."""
        self.assertTrue(declarations.call_invocation.is_call_invocation(decl_string))

    def test_split_on_vector(self):
        self.__test_is_call_invocation_impl("vector(int,std::allocator(int) )")
        self.__test_split_impl("vector(int,std::allocator(int) )",
                               "vector",
                               ["int", "std::allocator(int)"])
        self.__test_split_recursive_impl("vector(int,std::allocator(int) )",
                                         [("vector", ["int", "std::allocator(int)"]),
                                          ("std::allocator", ["int"])])

    def test_split_on_string(self):
        self.__test_is_call_invocation_impl("basic_string(char,std::char_traits(char),std::allocator(char) )")
        self.__test_split_impl("basic_string(char,std::char_traits(char),std::allocator(char) )",
                               "basic_string",
                               ["char", "std::char_traits(char)", "std::allocator(char)"])

    def test_split_on_map(self):
        self.__test_is_call_invocation_impl("map(long int,std::vector(int, std::allocator(int) ),std::less(long int),std::allocator(std::pair(const long int, std::vector(int, std::allocator(int) ) ) ) )")
        self.__test_split_impl("map(long int,std::vector(int, std::allocator(int) ),std::less(long int),std::allocator(std::pair(const long int, std::vector(int, std::allocator(int) ) ) ) )",
                               "map",
                               ["long int",
                                "std::vector(int, std::allocator(int) )",
                                "std::less(long int)",
                                "std::allocator(std::pair(const long int, std::vector(int, std::allocator(int) ) ) )"])

    def test_join_on_vector(self):
        self.assertEqual("vector( int, std::allocator(int) )",
                         declarations.call_invocation.join("vector", ("int", "std::allocator(int)")))

    def test_find_args(self):
        # find_args returns (open, close) index pairs for successive arg lists.
        temp = 'x()()'
        found = declarations.call_invocation.find_args(temp)
        self.assertEqual((1, 2), found)
        found = declarations.call_invocation.find_args(temp, found[1] + 1)
        self.assertEqual((3, 4), found)
        temp = 'x(int,int)(1,2)'
        found = declarations.call_invocation.find_args(temp)
        self.assertEqual((1, 9), found)
        found = declarations.call_invocation.find_args(temp, found[1] + 1)
        self.assertEqual((10, 14), found)

    def test_bug_unmatched_brace(self):
        # Regression test: parenthesized args containing quoted parens.
        src = 'AlternativeName((&string("")), (&string("")), (&string("")))'
        self.__test_split_impl(src,
                               'AlternativeName',
                               ['(&string(""))', '(&string(""))', '(&string(""))'])
def create_suite():
    """Build a TestSuite containing every tester_t test."""
    result = unittest.TestSuite()
    result.addTest(unittest.makeSuite(tester_t))
    return result
def run_suite():
    """Run the suite with a verbose text runner."""
    runner = unittest.TextTestRunner(verbosity=2)
    runner.run(create_suite())
# Allow running this test module directly from the command line.
if __name__ == "__main__":
    run_suite()
|
"""
Ludolph: Monitoring Jabber Bot
Copyright (C) 2012-2017 Erigones, s. r. o.
This file is part of Ludolph.
See the LICENSE file for copying permission.
"""
import os
import re
import sys
import signal
import logging
from collections import namedtuple
try:
# noinspection PyCompatibility,PyUnresolvedReferences
from configparser import RawConfigParser
except ImportError:
# noinspection PyCompatibility,PyUnresolvedReferences
from ConfigParser import RawConfigParser
try:
# noinspection PyCompatibility
from importlib import reload
except ImportError:
# noinspection PyUnresolvedReferences
from imp import reload
from ludolph.utils import parse_loglevel
from ludolph.bot import LudolphBot
from ludolph.plugins.plugin import LudolphPlugin
from ludolph import __version__
# Log line format used by logging.basicConfig() in start().
LOGFORMAT = '%(asctime)s %(levelname)-8s %(name)s: %(message)s'
logger = logging.getLogger('ludolph.main')
# (config section name, importable module name, plugin class) record used by
# the plugin loader.
Plugin = namedtuple('Plugin', ('name', 'module', 'cls'))
def daemonize():
    """
    Detach the current process into a daemon via the classic UNIX double fork.

    http://code.activestate.com/recipes/278731-creating-a-daemon-the-python-way/
    http://www.jejik.com/articles/2007/02/a_simple_unix_linux_daemon_in_python/

    Returns 0 in the surviving daemon process; both intermediate parents
    call sys.exit(0). Closes every inherited file descriptor and redirects
    stdin/stdout/stderr to /dev/null.
    """
    try:
        pid = os.fork()  # Fork #1
        if pid > 0:
            sys.exit(0)  # Exit first parent
    except OSError as e:
        sys.stderr.write('Fork #1 failed: %d (%s)\n' % (e.errno, e.strerror))
        sys.exit(1)

    # The first child. Decouple from parent environment
    # Become session leader of this new session.
    # Also be guaranteed not to have a controlling terminal
    os.chdir('/')
    # noinspection PyArgumentList
    os.setsid()
    os.umask(0o022)

    try:
        pid = os.fork()  # Fork #2
        if pid > 0:
            sys.exit(0)  # Exit from second parent
    except OSError as e:
        sys.stderr.write('Fork #2 failed: %d (%s)\n' % (e.errno, e.strerror))
        sys.exit(1)

    # Close all open file descriptors
    import resource  # Resource usage information
    maxfd = resource.getrlimit(resource.RLIMIT_NOFILE)[1]
    if maxfd == resource.RLIM_INFINITY:
        # No hard limit reported; fall back to a sane default.
        maxfd = 1024

    # Iterate through and close all file descriptors
    for fd in range(0, maxfd):
        try:
            os.close(fd)
        except OSError:  # ERROR, fd wasn't open (ignored)
            pass

    # Redirect standard file descriptors to /dev/null
    sys.stdout.flush()
    sys.stderr.flush()
    si = open(os.devnull, 'r')
    so = open(os.devnull, 'a+')
    se = open(os.devnull, 'a+')
    os.dup2(si.fileno(), sys.stdin.fileno())
    os.dup2(so.fileno(), sys.stdout.fileno())
    os.dup2(se.fileno(), sys.stderr.fileno())

    return 0
def start():
    """
    Start the Ludolph bot.

    Locates and parses the config file, optionally daemonizes and writes a
    pidfile, configures logging, loads plugins, installs signal handlers
    (SIGHUP triggers a config/plugin reload) and connects to the XMPP server.
    Exits the process on fatal errors.
    """
    ret = 0
    cfg = 'ludolph.cfg'
    cfg_fp = None
    # Candidate config locations, in priority order.
    cfg_lo = ((os.path.expanduser('~'), '.' + cfg), (sys.prefix, 'etc', cfg), ('/etc', cfg))
    # Sections handled by the bot core; everything else is a plugin section.
    config_base_sections = ('global', 'xmpp', 'webserver', 'cron', 'ludolph.bot')

    # Try to read config file from ~/.ludolph.cfg or /etc/ludolph.cfg
    for i in cfg_lo:
        try:
            cfg_fp = open(os.path.join(*i))
        except IOError:
            continue
        else:
            break

    if not cfg_fp:
        sys.stderr.write("""\nLudolph can't start!\n
You need to create a config file in one these locations: \n%s\n
You can rename ludolph.cfg.example and update the required options.
The example file is located in: %s\n\n""" % (
            '\n'.join([os.path.join(*i) for i in cfg_lo]),
            os.path.dirname(os.path.abspath(__file__))))
        sys.exit(1)

    # Read and parse configuration
    # noinspection PyShadowingNames
    def load_config(fp, reopen=False):
        # Parse the config from an open file; with reopen=True the file is
        # reopened by name first (used by the SIGHUP reload handler).
        config = RawConfigParser()
        if reopen:
            fp = open(fp.name)
        try:  # config.readfp() is Deprecated since python 3.2
            # noinspection PyDeprecation
            read_file = config.readfp
        except AttributeError:
            read_file = config.read_file
        read_file(fp)
        fp.close()
        return config

    config = load_config(cfg_fp)

    # Prepare logging configuration
    logconfig = {
        'level': parse_loglevel(config.get('global', 'loglevel')),
        'format': LOGFORMAT,
    }
    if config.has_option('global', 'logfile'):
        logfile = config.get('global', 'logfile').strip()
        if logfile:
            logconfig['filename'] = logfile

    # Daemonize
    if config.has_option('global', 'daemon'):
        if config.getboolean('global', 'daemon'):
            ret = daemonize()

    # Save pid file
    if config.has_option('global', 'pidfile'):
        try:
            with open(config.get('global', 'pidfile'), 'w') as fp:
                fp.write('%s' % os.getpid())
        except Exception as ex:
            # Setup logging just to show this error
            logging.basicConfig(**logconfig)
            logger.critical('Could not write to pidfile (%s)\n', ex)
            sys.exit(1)

    # Setup logging
    logging.basicConfig(**logconfig)

    # All exceptions will be logged without exit
    def log_except_hook(*exc_info):
        logger.critical('Unhandled exception!', exc_info=exc_info)
    sys.excepthook = log_except_hook

    # Default configuration
    use_tls = True
    use_ssl = False
    address = []

    # Starting
    logger.info('Starting Ludolph %s (%s %s)', __version__, sys.executable, sys.version.split()[0])
    logger.info('Loaded configuration from %s', cfg_fp.name)

    # Load plugins
    # noinspection PyShadowingNames
    def load_plugins(config, reinit=False):
        # Each non-base config section names a plugin: either a bare name
        # (looked up under ludolph.plugins) or a dotted module path.
        plugins = []

        for config_section in config.sections():
            config_section = config_section.strip()

            if config_section in config_base_sections:
                continue

            # Parse other possible imports
            parsed_plugin = config_section.split('.')

            if len(parsed_plugin) == 1:
                modname = 'ludolph.plugins.' + config_section
                plugin = config_section
            else:
                modname = config_section
                plugin = parsed_plugin[-1]

            logger.info('Loading plugin: %s', modname)

            try:
                # Translate super_ludolph_plugin into SuperLudolphPlugin
                clsname = plugin[0].upper() + re.sub(r'_+([a-zA-Z0-9])', lambda m: m.group(1).upper(), plugin[1:])
                module = __import__(modname, fromlist=[clsname])

                if reinit and getattr(module, '_loaded_', False):
                    reload(module)

                module._loaded_ = True
                imported_class = getattr(module, clsname)

                if not issubclass(imported_class, LudolphPlugin):
                    raise TypeError('Plugin: %s is not LudolphPlugin instance' % modname)

                plugins.append(Plugin(config_section, modname, imported_class))
            except Exception as ex:
                # A broken plugin must not prevent the bot from starting.
                logger.exception(ex)
                logger.critical('Could not load plugin: %s', modname)

        return plugins

    plugins = load_plugins(config)

    # XMPP connection settings
    if config.has_option('xmpp', 'host'):
        address = [config.get('xmpp', 'host'), '5222']
        if config.has_option('xmpp', 'port'):
            address[1] = config.get('xmpp', 'port')
        logger.info('Connecting to jabber server %s', ':'.join(address))
    else:
        logger.info('Using DNS SRV lookup to find jabber server')

    if config.has_option('xmpp', 'tls'):
        use_tls = config.getboolean('xmpp', 'tls')

    if config.has_option('xmpp', 'ssl'):
        use_ssl = config.getboolean('xmpp', 'ssl')

    # Here we go
    xmpp = LudolphBot(config, plugins=plugins)

    signal.signal(signal.SIGINT, xmpp.shutdown)
    signal.signal(signal.SIGTERM, xmpp.shutdown)

    if hasattr(signal, 'SIGHUP'):  # Windows does not support SIGHUP - bug #41
        # noinspection PyUnusedLocal,PyShadowingNames
        def sighup(signalnum, handler):
            # Reload config and plugins in place; guard against re-entry.
            if xmpp.reloading:
                logger.warning('Reload already in progress')
            else:
                xmpp.reloading = True

                try:
                    config = load_config(cfg_fp, reopen=True)
                    logger.info('Reloaded configuration from %s', cfg_fp.name)
                    xmpp.prereload()
                    plugins = load_plugins(config, reinit=True)
                    xmpp.reload(config, plugins=plugins)
                finally:
                    xmpp.reloading = False

        signal.signal(signal.SIGHUP, sighup)
        # signal.siginterrupt(signal.SIGHUP, false)  # http://stackoverflow.com/a/4302037

    if xmpp.client.connect(tuple(address), use_tls=use_tls, use_ssl=use_ssl):
        xmpp.client.process(block=True)
        sys.exit(ret)
    else:
        logger.error('Ludolph is unable to connect to jabber server')
        sys.exit(2)
# Allow running Ludolph directly (python -m ludolph.main).
if __name__ == '__main__':
    start()
|
from django_nose.tools import assert_false, assert_true
from pontoon.base.tests import TestCase
from pontoon.base.utils import extension_in
class UtilsTests(TestCase):
    """Tests for pontoon.base.utils.extension_in."""

    def test_extension_in(self):
        # Filenames whose (final) extension is in the allowed list.
        matching = [
            ('filename.txt', ['bat', 'txt']),
            ('filename.biff', ['biff']),
            ('filename.tar.gz', ['gz']),
        ]
        for filename, extensions in matching:
            assert_true(extension_in(filename, extensions))

        non_matching = [
            ('filename.txt', ['png', 'jpg']),
            ('.dotfile', ['bat', 'txt']),
            # Unintuitive, but that's how splitext works.
            ('filename.tar.gz', ['tar.gz']),
        ]
        for filename, extensions in non_matching:
            assert_false(extension_in(filename, extensions))
|
import os
import tempfile
from pkg_resources import Requirement
from infi.unittest import parameters
from .test_cases import ForgeTest
from pydeploy.environment import Environment
from pydeploy.environment_utils import EnvironmentUtils
from pydeploy.checkout_cache import CheckoutCache
from pydeploy.installer import Installer
from pydeploy import sources
from pydeploy.scm import git
from pydeploy import command
from pydeploy import exceptions
class SourceTest(ForgeTest):
    """Base test case providing a mocked Environment with installer/utils."""

    def setUp(self):
        super(SourceTest, self).setUp()
        env = self.forge.create_mock(Environment)
        env.installer = self.forge.create_mock(Installer)
        env.utils = self.forge.create_mock(EnvironmentUtils)
        self.env = env
class SourceFromStringTest(ForgeTest):
    """Tests for the Source.from_anything dispatch rules."""

    def setUp(self):
        super(SourceFromStringTest, self).setUp()
        self.S = sources.Source.from_anything

    def test__git(self):
        # git:// URLs become Git sources.
        self.assertIsInstance(self.S("git://bla"), sources.Git)

    def test__path(self):
        # An existing filesystem path becomes a Path source.
        directory = tempfile.mkdtemp()
        self.assertIsInstance(self.S(directory), sources.Path)

    def test__easy_install(self):
        # Anything else that is a string falls back to EasyInstall.
        self.assertIsInstance(self.S("blablabla"), sources.EasyInstall)

    def test__invalid_source(self):
        # Non-string values are rejected.
        for bad in (2, 2.5, True):
            with self.assertRaises(ValueError):
                self.S(bad)
class PathSourceTest(SourceTest):
    """Tests for sources.Path (local-directory source)."""

    def setUp(self):
        super(PathSourceTest, self).setUp()
        self.path = tempfile.mkdtemp()
        self.source = sources.Path(self.path)

    def test__get_name(self):
        self.assertEquals(self.source.get_name(), self.path)

    def test__uses_expanduser(self):
        # '~' must be expanded when the source is constructed.
        source = sources.Path("~/a/b/c")
        self.assertEquals(source._param, os.path.expanduser("~/a/b/c"))

    def test__get_signature(self):
        self.assertEquals(self.source.get_signature(), "Path({0})".format(self.path))

    def test__checkout(self):
        # A path source "checks out" to itself; an explicit target path is
        # unsupported.
        self.assertEquals(self.source.checkout(self.env), self.path)
        with self.assertRaises(NotImplementedError):
            self.source.checkout(self.env, '/another/path')

    @parameters.toggle('reinstall')
    def test__install(self, reinstall):
        # Record the expected delegation to the installer, then replay and
        # exercise install().
        self.env.installer.install_unpacked_package(self.path, self.path, reinstall=reinstall)
        self.forge.replay()
        self.source.install(self.env, reinstall=reinstall)
class DelegateToPathInstallTest(SourceTest):
    """Base class that replaces sources.Path with a class mock so subclasses
    can assert that install() is delegated to a Path source."""

    def setUp(self):
        super(DelegateToPathInstallTest, self).setUp()
        # Keep the original class around so instance mocks get its spec.
        self.path_class = self.forge.create_class_mock(sources.Path)
        self.orig_path_class = sources.Path
        self.forge.replace_with(sources, "Path", self.path_class)

    def expect_delegation_to_path_install(self, path, name, reinstall):
        # Expect Path(path, name=name) to be constructed and install() to be
        # called on the resulting instance.
        path_mock = self.forge.create_mock(self.orig_path_class)
        self.path_class(path, name=name).and_return(path_mock)
        return path_mock.install(self.env, reinstall=reinstall)
class GitSourceTest(DelegateToPathInstallTest):
    """Tests for sources.Git naming, checkout and install delegation."""

    def setUp(self):
        super(GitSourceTest, self).setUp()
        self.repo_url = "some/repo/url"
        self.branch = 'some_branch'
        self.source = sources.Git(self.repo_url, self.branch)
        # Stub out the real git operations.
        self.forge.replace_many(git, "clone_to_or_update", "reset_submodules")

    def test__master_is_default_branch(self):
        self.assertEquals(sources.Git('bla')._branch, 'master')

    def test__get_name(self):
        self.assertEquals(self.source.get_name(), self.repo_url + "@" + self.branch)

    def test__repr(self):
        self.assertEquals(repr(self.source), 'Git({})'.format(self.source.get_name()))

    def test__get_signature(self):
        self.assertEquals(self.source.get_signature(), repr(self.source))

    @parameters.toggle('reinstall')
    def test__git_source_install(self, reinstall):
        # install() should check out first, then delegate to a Path source.
        self.forge.replace(self.source, "checkout")
        checkout_path = "some/checkout/path"
        self.source.checkout(self.env).and_return(checkout_path)
        self.expect_delegation_to_path_install(checkout_path, name=self.repo_url, reinstall=reinstall)
        with self.forge.verified_replay_context():
            self.source.install(self.env, reinstall=reinstall)

    def test__git_source_checkout_with_path_argument(self):
        # With an explicit path, clone/update happens straight into it.
        checkout_path = "/some/path/to/checkout"
        git.clone_to_or_update(url=self.repo_url, path=checkout_path, branch=self.branch)
        git.reset_submodules(checkout_path)
        with self.forge.verified_replay_context():
            result = self.source.checkout(self.env, checkout_path)
        self.assertIs(result, checkout_path)

    def test__git_source_checkout_no_path_argument(self):
        # Without an explicit path, the environment's checkout cache decides.
        checkout_path = "/some/path/to/checkout"
        checkout_cache = self.forge.create_mock(CheckoutCache)
        self.env.get_checkout_cache().and_return(checkout_cache)
        checkout_cache.get_checkout_path(self.repo_url).and_return(checkout_path)
        git.clone_to_or_update(url=self.repo_url, branch=self.branch, path=checkout_path)
        git.reset_submodules(checkout_path)
        with self.forge.verified_replay_context():
            result = self.source.checkout(self.env)
        self.assertIs(result, checkout_path)

    def test__git_identifies_git_prefix(self):
        url = "git://bla"
        source = sources.Source.from_anything(url)
        self.assertIsInstance(source, sources.Git)
# NOTE(review): class name has a typo ("Contraints" -> "Constraints"); left
# as-is since unittest discovers it by this name.
class GitContraintsTest(ForgeTest):
    """Tests for Git.resolve_constraints: mapping a version spec onto the
    remote branches/tags reported by git."""

    def setUp(self):
        super(GitContraintsTest, self).setUp()
        self.forge.replace(git, "get_remote_references_dict")
        self.url = "some_url"
        self.source = sources.Git(self.url)

    def test__more_than_one_constraint_not_supported(self):
        with self.assertRaises(NotImplementedError):
            self.source.resolve_constraints([('>=', '2.0.0'), ('<=', '3.0.0')])

    @parameters.iterate('tag', ['v2.0.0', '2.0.0'])
    def test__exact_version_matches_tag(self, tag):
        self._assert_chooses("x==2.0.0", {
            git.Tag(tag) : "some_hash"
        }, 'tags/{}'.format(tag))

    def test__exact_version_with_no_match_raises_exception(self):
        self._assert_no_match('x==2.0.0', {
            git.Tag('bla') : 'h1',
            git.Branch('bloop') : 'h2'
        })

    @parameters.iterate('branch_name', ['v2.0.0', '2.0.0'])
    def test__minimum_version_inclusive_selects_exact(self, branch_name):
        self._assert_chooses("x>=2.0.0", {
            git.Branch(branch_name) : "h1"
        }, branch_name)

    @parameters.toggle('inclusive')
    @parameters.iterate('branch_name', ['3.0.0', 'v3.0.0', '2.3.2', 'v2.3'])
    def test__minimum_version_with_matches(self, inclusive, branch_name):
        # NOTE(review): this passes a set literal while sibling tests pass
        # ref->hash dicts; works only if resolve_constraints merely iterates
        # the keys — confirm against the sources.Git implementation.
        self._assert_chooses("x{0}2.0.0".format(">=" if inclusive else ">"), {
            git.Branch(branch_name)
        }, branch_name)

    @parameters.toggle('inclusive')
    @parameters.iterate('branch_name', ['2.0.0-a1', 'v2.0.0-b1', 'v1.9'])
    def test__minimum_version_without_matches(self, inclusive, branch_name):
        self._assert_no_match("x{0}2.0.0".format(">=" if inclusive else ">"), {
            git.Branch(branch_name)
        })

    @parameters.toggle('inclusive')
    def test__unbound_version_takes_from_master(self, inclusive):
        self._assert_chooses("x{0}2.0.0".format(">=" if inclusive else ">"), {
            git.Branch('master')
        }, 'master')

    def _assert_chooses(self, requirement, options, chosen):
        # Arrange the remote refs, then verify the resolved source points at
        # the expected branch/tag of the same URL.
        requirement = Requirement.parse(requirement)
        git.get_remote_references_dict(self.url).and_return(options)
        self.forge.replay()
        new_source = self.source.resolve_constraints(requirement.specs)
        self.assertIsInstance(new_source, sources.Git)
        self.assertEquals(new_source._url, self.url)
        self.assertEquals(new_source._branch, chosen)

    def _assert_no_match(self, requirement, options):
        # No remote ref satisfies the spec -> RequiredVersionNotFound.
        specs = Requirement.parse(requirement).specs
        git.get_remote_references_dict(self.url).and_return(options)
        self.forge.replay()
        with self.assertRaises(exceptions.RequiredVersionNotFound):
            self.source.resolve_constraints(specs)
class ExternalToolSourceTest(SourceTest):
    """Base for sources that install by shelling out to an external tool."""

    def setUp(self):
        super(ExternalToolSourceTest, self).setUp()
        # Never actually execute external commands from tests.
        self.forge.replace(command, "execute_assert_success")
        self.package_name = "some_package==1.0.0"
class PIPSourceTest(ExternalToolSourceTest):
    """Tests for the PIP source."""

    @parameters.toggle('reinstall')
    def test__install(self, reinstall):
        pip_source = sources.PIP(self.package_name)
        # install() must delegate to the environment's pip wrapper.
        self.env.execute_pip_install(self.package_name, reinstall=reinstall)
        with self.forge.verified_replay_context():
            pip_source.install(self.env, reinstall=reinstall)

    def test__checkout_not_implemented(self):
        # PIP sources cannot be checked out, with or without a target path.
        for args in ((self.env, '/some/path'), (self.env,)):
            with self.assertRaises(NotImplementedError):
                sources.PIP(self.package_name).checkout(*args)
class EasyInstallSourceTest(ExternalToolSourceTest):
    """Tests for the EasyInstall source."""

    @parameters.toggle('reinstall')
    def test__install(self, reinstall):
        # install() must delegate to the environment's easy_install wrapper.
        self.env.execute_easy_install(self.package_name, reinstall=reinstall)
        easy_install_source = sources.EasyInstall(self.package_name)
        with self.forge.verified_replay_context():
            easy_install_source.install(self.env, reinstall=reinstall)

    def test__checkout_not_implemented(self):
        # EasyInstall sources cannot be checked out, with or without a path.
        for args in ((self.env, '/some/path'), (self.env,)):
            with self.assertRaises(NotImplementedError):
                sources.EasyInstall(self.package_name).checkout(*args)
class SCMTest(SourceTest):
    """Tests for the SCM() convenience constructor."""

    def test__git(self):
        url = "git://some_repo"
        source = sources.SCM(url)
        self.assertIsInstance(source, sources.Git)
        self.assertEquals(source._url, url)

    def test__git_with_branch(self):
        # An '@branch' suffix is split off into the branch attribute.
        source = sources.SCM("git://some_repo@branch_name")
        self.assertIsInstance(source, sources.Git)
        self.assertEquals(source._url, "git://some_repo")
        self.assertEquals(source._branch, "branch_name")

    def test__other(self):
        # Unrecognized schemes are rejected.
        with self.assertRaises(ValueError):
            sources.SCM("bla")
|
from django.conf.urls import patterns, include, url
from django.views.generic import TemplateView
# Reusable view serving the static homepage template.
home = TemplateView.as_view(template_name='home.html')

# NOTE(review): patterns() was deprecated in Django 1.8 and removed in 1.10;
# newer Django expects urlpatterns to be a plain list of url()/path() entries.
# Confirm the project's pinned Django version before modernizing.
urlpatterns = patterns(
    '',
    url(r'^filter/', include('demoproject.filter.urls')),
    # An informative homepage.
    url(r'', home, name='home')
)
|
import datetime
import time
from django.core.exceptions import ObjectDoesNotExist
from django.db import models
from django.db.models import Q
from django.db.models.signals import pre_save
from django.contrib.contenttypes.fields import GenericRelation
from django.contrib.auth.models import User
from tidings.models import NotificationsMixin
from kitsune import forums
from kitsune.access.utils import has_perm, perm_is_defined_on
from kitsune.flagit.models import FlaggedObject
from kitsune.sumo.templatetags.jinja_helpers import urlparams, wiki_to_html
from kitsune.sumo.urlresolvers import reverse
from kitsune.sumo.models import ModelBase
from kitsune.search.models import (
SearchMappingType, SearchMixin, register_for_indexing,
register_mapping_type)
def _last_post_from(posts, exclude_post=None):
"""Return the most recent post in the given set, excluding the given post.
If there are none, return None.
"""
if exclude_post:
posts = posts.exclude(id=exclude_post.id)
posts = posts.order_by('-created')
try:
return posts[0]
except IndexError:
return None
class ThreadLockedError(Exception):
    """Raised when trying to create a post in a locked thread."""
class Forum(NotificationsMixin, ModelBase):
    """A discussion forum: an ordered, optionally permission-restricted
    container of threads."""

    name = models.CharField(max_length=50, unique=True)
    slug = models.SlugField(unique=True)
    description = models.TextField(null=True)
    # Denormalized pointer to the newest post; nulled (not cascaded) when
    # that post is deleted.
    last_post = models.ForeignKey('Post', related_name='last_post_in_forum',
                                  null=True, on_delete=models.SET_NULL)

    # Dictates the order in which forums are displayed in the forum list.
    display_order = models.IntegerField(default=1, db_index=True)

    # Whether or not this forum is visible in the forum list.
    is_listed = models.BooleanField(default=True, db_index=True)

    class Meta(object):
        ordering = ['display_order', 'id']
        permissions = (
            ('view_in_forum', 'Can view restricted forums'),
            ('post_in_forum', 'Can post in restricted forums'))

    def __unicode__(self):
        return self.name

    def get_absolute_url(self):
        return reverse('forums.threads', kwargs={'forum_slug': self.slug})

    def allows_viewing_by(self, user):
        """Return whether a user can view me, my threads, and their posts."""
        return (self._allows_public_viewing() or
                has_perm(user, 'forums_forum.view_in_forum', self))

    def _allows_public_viewing(self):
        """Return whether I am a world-readable forum.

        If a django-authority permission relates to me, I am considered non-
        public. (We assume that you attached a permission to me in order to
        assign it to some users or groups.) Considered adding a Public flag to
        this model, but we didn't want it to show up on form and thus be
        accidentally flippable by readers of the Admin forum, who are all
        privileged enough to do so.
        """
        return not perm_is_defined_on('forums_forum.view_in_forum', self)

    def allows_posting_by(self, user):
        """Return whether a user can make threads and posts in me."""
        return (self._allows_public_posting() or
                has_perm(user, 'forums_forum.post_in_forum', self))

    def _allows_public_posting(self):
        """Return whether I am a world-writable forum."""
        return not perm_is_defined_on('forums_forum.post_in_forum', self)

    def update_last_post(self, exclude_thread=None, exclude_post=None):
        """Set my last post to the newest, excluding given thread and post.

        Only assigns self.last_post; the caller is responsible for save().
        """
        posts = Post.objects.filter(thread__forum=self)
        if exclude_thread:
            posts = posts.exclude(thread=exclude_thread)
        self.last_post = _last_post_from(posts, exclude_post=exclude_post)

    @classmethod
    def authorized_forums_for_user(cls, user):
        """Returns the forums this user is authorized to view"""
        return [f for f in Forum.objects.all() if f.allows_viewing_by(user)]
class Thread(NotificationsMixin, ModelBase, SearchMixin):
    """A forum thread.

    ``replies`` and ``last_post`` are denormalized caches that are kept in
    sync by Post.save/Post.delete and by the update_last_post helpers.
    """
    title = models.CharField(max_length=255)
    forum = models.ForeignKey('Forum')
    created = models.DateTimeField(default=datetime.datetime.now,
                                   db_index=True)
    creator = models.ForeignKey(User)
    last_post = models.ForeignKey('Post', related_name='last_post_in',
                                  null=True, on_delete=models.SET_NULL)
    replies = models.IntegerField(default=0)
    is_locked = models.BooleanField(default=False)
    is_sticky = models.BooleanField(default=False, db_index=True)

    class Meta:
        ordering = ['-is_sticky', '-last_post__created']

    def __setattr__(self, attr, val):
        """Notice when the forum field changes.

        A property won't do here, because it usurps the "forum" name and
        prevents us from using lookups like Thread.objects.filter(forum=f).
        """
        # Remember the original forum exactly once, before the first
        # reassignment; save() consumes self._old_forum to fix up both
        # forums' last_post caches after a thread moves.
        if attr == 'forum' and not hasattr(self, '_old_forum'):
            try:
                self._old_forum = self.forum
            except ObjectDoesNotExist:
                # Fresh, unsaved instance: there is no previous forum.
                pass
        super(Thread, self).__setattr__(attr, val)

    @property
    def last_page(self):
        """Returns the page number for the last post."""
        # NOTE(review): relies on integer floor division (Python 2 `/` on
        # ints); under true division this would yield a float -- confirm
        # before porting to Python 3.
        return self.replies / forums.POSTS_PER_PAGE + 1

    def __unicode__(self):
        return self.title

    def delete(self, *args, **kwargs):
        """Override delete method to update parent forum info."""
        # Re-fetch the forum so we update the persisted row, not a possibly
        # stale in-memory copy.
        forum = Forum.objects.get(pk=self.forum.id)
        if forum.last_post and forum.last_post.thread_id == self.id:
            forum.update_last_post(exclude_thread=self)
            forum.save()
        super(Thread, self).delete(*args, **kwargs)

    def new_post(self, author, content):
        """Create a new post, if the thread is unlocked.

        Raises:
            ThreadLockedError: if the thread is locked.
        """
        if self.is_locked:
            raise ThreadLockedError
        return self.post_set.create(author=author, content=content)

    def get_absolute_url(self):
        return reverse('forums.posts', args=[self.forum.slug, self.id])

    def get_last_post_url(self):
        # Deep-link to the last post, including the page it lives on and a
        # `last` marker used by the posts view.
        query = {'last': self.last_post_id}
        page = self.last_page
        if page > 1:
            query['page'] = page
        url = reverse('forums.posts', args=[self.forum.slug, self.id])
        return urlparams(url, hash='post-%s' % self.last_post_id, **query)

    def save(self, *args, **kwargs):
        super(Thread, self).save(*args, **kwargs)
        # If __setattr__ recorded a forum change, refresh the last_post
        # caches of both the old and the new forum, then forget the marker
        # so later saves don't repeat the work.
        old_forum = getattr(self, '_old_forum', None)
        new_forum = self.forum
        if old_forum and old_forum != new_forum:
            old_forum.update_last_post(exclude_thread=self)
            old_forum.save()
            new_forum.update_last_post()
            new_forum.save()
            del self._old_forum

    def update_last_post(self, exclude_post=None):
        """Set my last post to the newest, excluding the given post."""
        last = _last_post_from(self.post_set, exclude_post=exclude_post)
        self.last_post = last
        # If self.last_post is None, and this was called from Post.delete,
        # then Post.delete will erase the thread, as well.

    @classmethod
    def get_mapping_type(cls):
        return ThreadMappingType
@register_mapping_type
class ThreadMappingType(SearchMappingType):
    """Search mapping that indexes a Thread together with all of its posts."""
    seconds_ago_filter = 'last_post__created__gte'

    @classmethod
    def search(cls):
        base = super(ThreadMappingType, cls).search()
        return base.order_by('created')

    @classmethod
    def get_model(cls):
        return Thread

    @classmethod
    def get_query_fields(cls):
        return ['post_title', 'post_content']

    @classmethod
    def get_mapping(cls):
        return {
            'properties': {
                'id': {'type': 'long'},
                'model': {'type': 'string', 'index': 'not_analyzed'},
                'url': {'type': 'string', 'index': 'not_analyzed'},
                'indexed_on': {'type': 'integer'},
                'created': {'type': 'integer'},
                'updated': {'type': 'integer'},
                'post_forum_id': {'type': 'integer'},
                'post_title': {'type': 'string', 'analyzer': 'snowball'},
                'post_is_sticky': {'type': 'boolean'},
                'post_is_locked': {'type': 'boolean'},
                'post_author_id': {'type': 'integer'},
                'post_author_ord': {'type': 'string', 'index': 'not_analyzed'},
                'post_content': {'type': 'string', 'analyzer': 'snowball',
                                 'store': 'yes',
                                 'term_vector': 'with_positions_offsets'},
                'post_replies': {'type': 'integer'}
            }
        }

    @classmethod
    def extract_document(cls, obj_id, obj=None):
        """Extracts interesting thing from a Thread and its Posts"""
        if obj is None:
            obj = cls.get_model().objects.select_related(
                'last_post').get(pk=obj_id)

        # TODO: Sphinx stores created and updated as seconds since the
        # epoch, so we convert them to that format here so that the
        # search view works correctly. When we ditch Sphinx, we should
        # see if it's faster to filter on ints or whether we should
        # switch them to dates.
        document = {
            'id': obj.id,
            'model': cls.get_mapping_type_name(),
            'url': obj.get_absolute_url(),
            'indexed_on': int(time.time()),
            'created': int(time.mktime(obj.created.timetuple())),
            'post_forum_id': obj.forum.id,
            'post_title': obj.title,
            'post_is_sticky': obj.is_sticky,
            'post_is_locked': obj.is_locked,
            'post_replies': obj.replies,
        }
        if obj.last_post is not None:
            document['updated'] = int(
                time.mktime(obj.last_post.created.timetuple()))
        else:
            document['updated'] = None

        # Fold every post of the thread into the document.
        author_ids = set()
        author_ords = set()
        contents = []
        for post in Post.objects.filter(
                thread_id=obj.id).select_related('author'):
            author_ids.add(post.author.id)
            author_ords.add(post.author.username)
            contents.append(post.content)
        document['post_author_id'] = list(author_ids)
        document['post_author_ord'] = list(author_ords)
        document['post_content'] = contents
        return document
# Re-index a Thread in the search backend whenever it is saved or deleted.
register_for_indexing('forums', Thread)
class Post(ModelBase):
    """A single post in a thread.

    save()/delete() maintain the denormalized ``replies`` and ``last_post``
    caches on the parent Thread and Forum.
    """
    thread = models.ForeignKey('Thread')
    content = models.TextField()
    author = models.ForeignKey(User)
    created = models.DateTimeField(default=datetime.datetime.now,
                                   db_index=True)
    updated = models.DateTimeField(default=datetime.datetime.now,
                                   db_index=True)
    # Tracked separately from `author` so edits by moderators are visible.
    updated_by = models.ForeignKey(User,
                                   related_name='post_last_updated_by',
                                   null=True)
    flags = GenericRelation(FlaggedObject)

    class Meta:
        ordering = ['created']

    def __unicode__(self):
        return self.content[:50]

    def save(self, *args, **kwargs):
        """
        Override save method to update parent thread info and take care of
        created and updated.
        """
        new = self.id is None

        if not new:
            self.updated = datetime.datetime.now()

        super(Post, self).save(*args, **kwargs)

        if new:
            # `replies` excludes the thread-starting post, hence count() - 1.
            self.thread.replies = self.thread.post_set.count() - 1
            self.thread.last_post = self
            self.thread.save()

            self.thread.forum.last_post = self
            self.thread.forum.save()

    def delete(self, *args, **kwargs):
        """Override delete method to update parent thread info."""
        # Re-fetch so we mutate the persisted rows, not stale copies.
        thread = Thread.objects.get(pk=self.thread.id)
        if thread.last_post_id and thread.last_post_id == self.id:
            thread.update_last_post(exclude_post=self)
        # count() still includes this post here: -1 for the thread-starting
        # post and -1 for the post being deleted.
        thread.replies = thread.post_set.count() - 2
        thread.save()
        forum = Forum.objects.get(pk=thread.forum.id)
        if forum.last_post_id and forum.last_post_id == self.id:
            forum.update_last_post(exclude_post=self)
            forum.save()

        super(Post, self).delete(*args, **kwargs)
        # If I was the last post in the thread, delete the thread.
        if thread.last_post is None:
            thread.delete()

    @property
    def page(self):
        """Get the page of the thread on which this post is found."""
        t = self.thread
        earlier = t.post_set.filter(created__lte=self.created).count() - 1
        if earlier < 1:
            return 1
        # NOTE(review): integer floor division (Python 2 semantics) -- would
        # return a float under true division; confirm before porting.
        return earlier / forums.POSTS_PER_PAGE + 1

    def get_absolute_url(self):
        query = {}
        if self.page > 1:
            query = {'page': self.page}

        url_ = self.thread.get_absolute_url()
        return urlparams(url_, hash='post-%s' % self.id, **query)

    @property
    def content_parsed(self):
        # Render the wiki-markup content to HTML on access.
        return wiki_to_html(self.content)
# When a Post changes, re-index its parent Thread (the indexed document).
register_for_indexing('forums', Post, instance_to_indexee=lambda p: p.thread)
def user_pre_save(sender, instance, **kw):
    """When a user's username is changed, we must reindex the threads
    they participated in.
    """
    if not instance.id:
        return
    stored = User.objects.get(id=instance.id)
    if stored.username == instance.username:
        return
    affected = (
        Thread.objects
        .filter(
            Q(creator=instance) |
            Q(post__author=instance))
        .only('id')
        .distinct())
    for thread in affected:
        thread.index_later()
# Hook the username-change reindexing in before User rows are written.
pre_save.connect(
    user_pre_save, sender=User, dispatch_uid='forums_user_pre_save')
|
import dockci.commands
from dockci.server import APP, app_init, MANAGER
if __name__ == "__main__":
    # Entry point: initialise the application, then hand control to the
    # management command runner (presumably Flask-Script's Manager -- confirm).
    app_init()
    MANAGER.run()
|
import os, logging
from PIL import Image
from sqlalchemy.orm.session import object_session
from sqlalchemy.orm.util import identity_key
from iktomi.unstable.utils.image_resizers import ResizeFit
from iktomi.utils import cached_property
from ..files import TransientFile, PersistentFile
from .files import FileEventHandlers, FileProperty
logger = logging.getLogger(__name__)
class ImageFile(PersistentFile):
    """Persistent file that lazily exposes image dimensions.

    ``width``/``height`` are iktomi ``cached_property`` descriptors: the
    first access calls ``_get_properties``, which stores plain instance
    attributes that shadow the descriptors, so PIL opens the file at most
    once per instance.
    """
    def _get_properties(self, properties=['width', 'height']):
        # NOTE: the mutable default is safe here -- the list is only read,
        # never mutated.
        if 'width' in properties or 'height' in properties:
            image = Image.open(self.path)
            # Setting both attributes at once shadows both descriptors.
            self.width, self.height = image.size

    @cached_property
    def width(self):
        # Populates self.width (and self.height) as instance attributes,
        # then returns the freshly-set value.
        self._get_properties(['width'])
        return self.width

    @cached_property
    def height(self):
        self._get_properties(['height'])
        return self.height
class ImageEventHandlers(FileEventHandlers):
    """File event handlers that additionally resize/convert image files."""

    def _2persistent(self, target, transient):
        # XXX move this method to file_manager
        # XXX Do this check or not?
        image = Image.open(transient.path)
        # NOTE(review): PIL/Pillow reports formats in upper case ('BMP'),
        # so the `!= 'bmp'` clause looks like it can never fail -- confirm
        # the original intent before changing it.
        assert image.format in Image.SAVE and image.format != 'bmp',\
            'Unsupported image format'

        if self.prop.image_sizes:
            session = object_session(target)
            persistent_name = getattr(target, self.prop.attribute_name)
            pn, ext = os.path.splitext(persistent_name)

            # Resize/crop to the configured target sizes.
            image_crop = self.prop.resize(image, self.prop.image_sizes)

            if self.prop.force_rgb and image_crop.mode not in ['RGB', 'RGBA']:
                image_crop = image_crop.convert('RGB')

            if ext == '.gif':
                # GIFs are re-encoded as JPEG on persist.
                image_crop.format = 'jpeg'
                ext = '.jpeg'

            if self.prop.enhancements:
                for enhance, factor in self.prop.enhancements:
                    image_crop = enhance(image_crop).enhance(factor)

            if self.prop.filter:
                image_crop = image_crop.filter(self.prop.filter)

            if not ext:
                # set extension if it is not set
                ext = '.' + image.format.lower()

            if pn + ext != persistent_name:
                persistent_name = pn + ext
                # XXX hack?
                setattr(target, self.prop.attribute_name, persistent_name)

            image_attr = getattr(target.__class__, self.prop.key)
            # NOTE(review): the double assignment looks accidental --
            # `persistent` is immediately overwritten on the next line.
            file_manager = persistent = session.find_file_manager(image_attr)
            persistent = file_manager.get_persistent(persistent_name,
                                                     self.prop.persistent_cls)

            transient = session.find_file_manager(image_attr).new_transient(ext)
            kw = dict(quality=self.prop.quality)
            if self.prop.optimize:
                kw = dict(kw, optimize=True)
            image_crop.save(transient.path, **kw)

            session.find_file_manager(image_attr).store(transient, persistent)
            return persistent
        else:
            # Attention! This method can accept PersistentFile.
            # In that case the original must NEVER be deleted or rewritten.
            assert isinstance(transient, TransientFile), repr(transient)
            return FileEventHandlers._2persistent(self, target, transient)

    def before_update(self, mapper, connection, target):
        FileEventHandlers.before_update(self, mapper, connection, target)
        self._fill_img(mapper, connection, target)

    def before_insert(self, mapper, connection, target):
        FileEventHandlers.before_insert(self, mapper, connection, target)
        self._fill_img(mapper, connection, target)

    def _fill_img(self, mapper, connection, target):
        # Derive this image from another image attribute (`fill_from`) when
        # no value has been assigned explicitly.
        if self.prop.fill_from:
            # XXX Looks hacky
            value = getattr(target, self.prop.key)
            if value is None:
                base = getattr(target, self.prop.fill_from)
                if base is None:
                    return
                if not os.path.isfile(base.path):
                    # NOTE: logger.warn is a deprecated alias of warning.
                    logger.warn('Original file is absent %s %s %s',
                                identity_key(instance=target),
                                self.prop.fill_from,
                                base.path)
                    return

                ext = os.path.splitext(base.name)[1]
                session = object_session(target)
                image_attr = getattr(target.__class__, self.prop.key)
                name = session.find_file_manager(image_attr).new_file_name(
                    self.prop.name_template, target, ext, '')
                setattr(target, self.prop.attribute_name, name)

                persistent = self._2persistent(target, base)
                setattr(target, self.prop.key, persistent)
class ImageProperty(FileProperty):
    """FileProperty that resizes/converts image files on persist.

    Options consumed from the definition dict:
        image_sizes: target sizes for resizing, or None (no resize pass).
        resize: resizer callable; defaults to ResizeFit().
        fill_from: attribute name to derive this image from, or None.
        filter: optional PIL filter applied after resizing.
        enhancements: list of (Enhance class, factor) pairs.
        force_rgb: convert to RGB before saving (default True); implied
            whenever enhancements or a filter are configured.
        quality: save quality (default 85).
        optimize: pass optimize=True to the save call (default False).
    """

    event_cls = ImageEventHandlers

    def _set_options(self, options):
        # XXX rename image_sizes?
        options = dict(options)
        self.image_sizes = options.pop('image_sizes', None)
        self.resize = options.pop('resize', None) or ResizeFit()
        # XXX implement
        self.fill_from = options.pop('fill_from', None)
        self.filter = options.pop('filter', None)
        self.enhancements = options.pop('enhancements', [])
        # Always consume 'force_rgb' so it cannot leak through to
        # FileProperty._set_options. (The original short-circuited the pop
        # when enhancements/filter were truthy, leaving the key in
        # `options`.) Enhancements and filters require RGB mode, so either
        # forces the flag on.
        force_rgb = options.pop('force_rgb', True)
        self.force_rgb = bool(self.enhancements or self.filter or force_rgb)
        self.quality = options.pop('quality', 85)
        self.optimize = options.pop('optimize', False)
        # fill_from only makes sense together with a resize configuration.
        assert self.fill_from is None or self.image_sizes is not None
        options.setdefault('persistent_cls', ImageFile)
        FileProperty._set_options(self, options)
|
# Number of test cases (Python 2 stdin: raw_input reads one line).
t = int(raw_input())
# Large prime modulus used for all arithmetic below.
MOD = 10**9 + 7
def modexp(a, b, mod=10**9 + 7):
    """Return (a ** b) % mod via binary (square-and-multiply) exponentiation.

    Args:
        a: base (int).
        b: non-negative exponent (int).
        mod: modulus; defaults to 10**9 + 7, matching the module-level MOD,
            so existing two-argument callers are unaffected.

    The exponent is halved with ``>>= 1`` instead of the original ``b /= 2``:
    under true division (Python 3, or ``from __future__ import division``)
    ``/=`` turns ``b`` into a float and ``b & 1`` then raises TypeError.
    """
    res = 1
    a %= mod
    while b:
        if b & 1:
            res = (res * a) % mod
        a = (a * a) % mod
        b >>= 1
    return res
# Precomputed factorials (fn) and modular inverse factorials (ifn) mod MOD.
fn = [1 for _ in xrange(100001)]
ifn = [1 for _ in xrange(100001)]
for i in range(1,100000):
    fn[i] = fn[i-1] * i
    fn[i] %= MOD
    # Inverse via Fermat's little theorem: fn[i]^(MOD-2) mod MOD.
    ifn[i] = modexp(fn[i],MOD-2)
# NOTE(review): index 100000 is never filled (range stops at 99999), and a
# modular inverse per entry costs O(n log MOD); inverting fn[99999] once and
# walking backwards would be O(n). Confirm the input bound before relying
# on the last slot.
def nCr(n, k):
    """Return C(n, k) modulo MOD using the precomputed fn/ifn tables.

    The product is reduced modulo MOD after each multiplication; the
    original returned an unreduced product on the order of (10^9)^3,
    needlessly large for callers that reduce afterwards.
    """
    return fn[n] * ifn[k] % MOD * ifn[n - k] % MOD
# Process each test case (Python 2 I/O: raw_input / print statement).
for ti in range(t):
    n = int(raw_input())
    a = map(int,raw_input().split())
    ans = 0
    # Alternating-sign binomial sum: sum_i (-1)^i * C(n-1, i) * a[i] mod MOD.
    # NOTE(review): this has the shape of the (n-1)-th finite difference of
    # the sequence -- confirm against the problem statement.
    for i in range(n):
        if i%2==0:
            ans += nCr(n-1,i)%MOD * a[i]%MOD
        else:
            ans -= nCr(n-1,i)%MOD * a[i]%MOD
        # Keep the accumulator reduced (also normalises negatives).
        ans %= MOD
    print ans
|
from swgpy.object import *
def create(kernel):
    """Build the Building template for the Lok mining cave (.iff).

    Args:
        kernel: the swgpy service kernel (unused here; required by the
            template-factory calling convention).
    Returns:
        A populated Building instance.
    """
    result = Building()

    result.template = "object/building/lok/shared_mining_cave_01.iff"
    result.attribute_template_id = -1
    result.stfName("building_name","cave")

    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####

    return result
|
from swgpy.object import *
def create(kernel):
    """Build the Creature template for the Sullustan rebel brigadier general.

    Args:
        kernel: the swgpy service kernel (unused here; required by the
            template-factory calling convention).
    Returns:
        A populated Creature instance.
    """
    result = Creature()

    result.template = "object/mobile/shared_dressed_rebel_brigadier_general_sullustan_male.iff"
    result.attribute_template_id = 9
    result.stfName("npc_name","sullustan_base_male")

    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####

    return result
|
from __future__ import absolute_import
from jinja2 import Markup
from rstblog.programs import RSTProgram
import typogrify
class TypogrifyRSTProgram(RSTProgram):
    """RSTProgram variant that runs typogrify over the rendered fragment."""

    def get_fragments(self):
        """Render the source once, typogrify the fragment, and cache it."""
        cached = self._fragment_cache
        if cached is not None:
            return cached
        with self.context.open_source_file() as source:
            self.get_header(source)
            rendered = self.context.render_rst(source.read().decode('utf-8'))
            rendered['fragment'] = Markup(
                typogrify.typogrify(rendered['fragment']))
            self._fragment_cache = rendered
        return rendered
def setup(builder):
    """rstblog plugin hook: handle ``.rst`` sources with the typogrify program."""
    builder.programs['rst'] = TypogrifyRSTProgram
|
from datetime import datetime, timedelta
from time import sleep
from random import uniform
class SleepSchedule(object):
    """Pauses the execution of the bot every day for some time

    Simulates the user going to sleep every day for some time, the sleep time
    and the duration is changed every day by a random offset defined in the
    config file
    Example Config:
    "sleep_schedule": [
      {
        "time": "12:00",
        "duration": "5:30",
        "time_random_offset": "00:30",
        "duration_random_offset": "00:30",
        "wake_up_at_location": ""
      },
      {
        "time": "17:45",
        "duration": "3:00",
        "time_random_offset": "01:00",
        "duration_random_offset": "00:30",
        "wake_up_at_location": ""
      }
    ]
    time: (HH:MM) local time that the bot should sleep
    duration: (HH:MM) the duration of sleep
    time_random_offset: (HH:MM) random offset of time that the sleep will start
                        for this example the possible start time is 11:30-12:30
    duration_random_offset: (HH:MM) random offset of duration of sleep
                            for this example the possible duration is 5:00-6:00
    wake_up_at_location: (lat, long | lat, long, alt | "") the location at which the bot wake up
    *Note that an empty string ("") will not change the location*.
    """
    # How often to emit a heartbeat while sleeping, in seconds.
    LOG_INTERVAL_SECONDS = 600
    # Skip a sleep window if its start is within SCHEDULING_MARGIN from now.
    SCHEDULING_MARGIN = timedelta(minutes=10)

    def __init__(self, bot, config):
        self.bot = bot
        self._process_config(config)
        self._schedule_next_sleep()
        self._calculate_current_sleep()

    def work(self):
        """Sleep if a window is due, then schedule the following one."""
        if self._should_sleep_now():
            self._sleep()
            wake_up_at_location = self._wake_up_at_location
            self._schedule_next_sleep()
            if wake_up_at_location:
                if hasattr(self.bot, 'api'): # Check if api is already initialized
                    self.bot.api.set_position(wake_up_at_location[0],wake_up_at_location[1],wake_up_at_location[2])
                else:
                    self.bot.wake_location = wake_up_at_location
            if hasattr(self.bot, 'api'): self.bot.login() # Same here

    def _process_config(self, config):
        """Parse the raw config entries into seconds-based schedule entries."""
        self.entries = []

        for entry in config:

            prepared = {}

            # Keep 'time' as a datetime for easy hour/minute access later.
            prepared['time'] = datetime.strptime(entry['time'] if 'time' in entry else '01:00', '%H:%M')

            # Using datetime for easier stripping of timedeltas
            raw_duration = datetime.strptime(entry['duration'] if 'duration' in entry else '07:00', '%H:%M')
            duration = int(timedelta(hours=raw_duration.hour, minutes=raw_duration.minute).total_seconds())

            raw_time_random_offset = datetime.strptime(entry['time_random_offset'] if 'time_random_offset' in entry else '01:00', '%H:%M')
            time_random_offset = int(
                timedelta(
                    hours=raw_time_random_offset.hour, minutes=raw_time_random_offset.minute).total_seconds())

            raw_duration_random_offset = datetime.strptime(entry['duration_random_offset'] if 'duration_random_offset' in entry else '00:30', '%H:%M')
            duration_random_offset = int(
                timedelta(
                    hours=raw_duration_random_offset.hour, minutes=raw_duration_random_offset.minute).total_seconds())

            raw_wake_up_at_location = entry['wake_up_at_location'] if 'wake_up_at_location' in entry else ''
            if raw_wake_up_at_location:
                try:
                    # Accept "lat,lng" or "lat,lng,alt"; a missing altitude
                    # is randomised within the configured bounds.
                    wake_up_at_location = raw_wake_up_at_location.split(',',2)
                    lat=float(wake_up_at_location[0])
                    lng=float(wake_up_at_location[1])
                    if len(wake_up_at_location) == 3:
                        alt=float(wake_up_at_location[2])
                    else:
                        alt = uniform(self.bot.config.alt_min, self.bot.config.alt_max)
                except ValueError:
                    raise ValueError('SleepSchedule wake_up_at_location, parsing error in location') #TODO there must be a more elegant way to do it...
                prepared['wake_up_at_location'] = [lat, lng, alt]
            prepared['duration'] = duration
            prepared['time_random_offset'] = time_random_offset
            prepared['duration_random_offset'] = duration_random_offset
            self.entries.append(prepared)

    def _schedule_next_sleep(self):
        """Pick the next sleep window and announce it via the event manager."""
        self._next_sleep, self._next_duration, self._wake_up_at_location = self._get_next_sleep_schedule()
        self.bot.event_manager.emit(
            'next_sleep',
            sender=self,
            formatted="Next sleep at {time}",
            data={
                'time': str(self._next_sleep)
            }
        )

    def _calculate_current_sleep(self):
        """Derive yesterday's window so a bot started mid-sleep still sleeps."""
        self._current_sleep = self._next_sleep - timedelta(days=1)
        current_duration = self._next_duration
        self._current_end = self._current_sleep + timedelta(seconds = current_duration)

    def _should_sleep_now(self):
        """True when a scheduled (or still-running) sleep window is active."""
        if datetime.now() >= self._next_sleep:
            return True
        if datetime.now() >= self._current_sleep and datetime.now() < self._current_end:
            # Mid-window start: only sleep the remaining time.
            self._next_duration = (self._current_end - datetime.now()).total_seconds()
            return True

        return False

    def _get_next_sleep_schedule(self):
        """Return (start time, duration, wake location) of the closest entry."""
        now = datetime.now() + self.SCHEDULING_MARGIN

        times = []
        for index in range(len(self.entries)):
            next_time = now.replace(hour=self.entries[index]['time'].hour, minute=self.entries[index]['time'].minute)
            next_time += timedelta(seconds=self._get_random_offset(self.entries[index]['time_random_offset']))

            # If sleep time is passed add one day
            if next_time <= now:
                next_time += timedelta(days=1)

            times.append(next_time)

        diffs = {}
        for index in range(len(self.entries)):
            diff = (times[index]-now).total_seconds()
            if diff >= 0: diffs[index] = diff

        # NOTE: dict.iterkeys() is Python 2 only.
        closest = min(diffs.iterkeys(), key=lambda x: diffs[x])
        next_time = times[closest]
        next_duration = self._get_next_duration(self.entries[closest])
        location = self.entries[closest]['wake_up_at_location'] if 'wake_up_at_location' in self.entries[closest] else ''

        return next_time, next_duration, location

    def _get_next_duration(self, entry):
        duration = entry['duration'] + self._get_random_offset(entry['duration_random_offset'])
        return duration

    def _get_random_offset(self, max_offset):
        # Uniformly random integer-ish offset in [-max_offset, max_offset].
        offset = uniform(-max_offset, max_offset)
        return int(offset)

    def _sleep(self):
        """Block for the scheduled duration, logging a periodic heartbeat."""
        sleep_to_go = self._next_duration

        sleep_m, sleep_s = divmod(sleep_to_go, 60)
        sleep_h, sleep_m = divmod(sleep_m, 60)
        sleep_hms = '%02d:%02d:%02d' % (sleep_h, sleep_m, sleep_s)

        now = datetime.now()
        wake = str(now + timedelta(seconds=sleep_to_go))

        self.bot.event_manager.emit(
            'bot_sleep',
            sender=self,
            formatted="Sleeping for {time_hms}, wake at {wake}",
            data={
                'time_hms': sleep_hms,
                'wake': wake
            }
        )
        # Sleep in LOG_INTERVAL_SECONDS chunks so the process stays
        # responsive enough to show progress.
        while sleep_to_go > 0:
            if sleep_to_go < self.LOG_INTERVAL_SECONDS:
                sleep(sleep_to_go)
                sleep_to_go = 0
            else:
                sleep(self.LOG_INTERVAL_SECONDS)
                sleep_to_go -= self.LOG_INTERVAL_SECONDS
|
from swgpy.object import *
def create(kernel):
    """Build the Creature template for the crafted CLL-8 binary load lifter.

    Args:
        kernel: the swgpy service kernel (unused here; required by the
            template-factory calling convention).
    Returns:
        A populated Creature instance.
    """
    result = Creature()

    result.template = "object/creature/npc/droid/crafted/shared_cll_8_binary_load_lifter_advanced.iff"
    result.attribute_template_id = 3
    result.stfName("droid_name","cll_8_binary_load_lifter_crafted_advanced")

    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####

    return result
|
import operator
import mock
import pytest
from okcupyd import User
from okcupyd import magicnumbers
from okcupyd.magicnumbers import maps
from okcupyd.profile import Profile
from okcupyd.json_search import SearchFetchable, search
from okcupyd.location import LocationQueryCache
from okcupyd.session import Session
from . import util
# Shared xfail reason for tests covering filters awaiting reimplementation.
SEARCH_FILTERS_BEING_REIMPLEMENTED = "SEARCH_FILTERS_ARE_BEING_REIMPLEMENTED"
@util.use_cassette
def test_age_filter():
    """With min age == max age, every returned profile has exactly that age."""
    age = 22
    search_fetchable = SearchFetchable(gentation='everybody',
                                       minimum_age=age, maximum_age=age)
    for profile in search_fetchable[:5]:
        assert profile.age == age
@util.use_cassette
def test_count_variable(request):
    """search(count=N) returns exactly N profiles whose basic fields resolve."""
    profiles = search(gentation='everybody', count=14)
    assert len(profiles) == 14

    # Touch each attribute to force lazy loads against the cassette.
    for profile in profiles:
        profile.username
        profile.age
        profile.location
        profile.match_percentage
        profile.enemy_percentage
        profile.id
        profile.rating
        profile.contacted
@util.use_cassette
def test_location_filter():
    """Searching within 1 mile of a location only yields profiles there."""
    session = Session.login()
    location_cache = LocationQueryCache(session)
    location = 'Portland, OR'
    search_fetchable = SearchFetchable(location=location, location_cache=location_cache, radius=1)
    for profile in search_fetchable[:5]:
        assert profile.location == 'Portland, OR'
@util.use_cassette(path='search_function')
def test_search_function():
    """search(count=1) yields a single Profile with resolvable fields."""
    profile, = search(count=1)
    assert isinstance(profile, Profile)

    # Touch each attribute to force lazy loads against the cassette.
    profile.username
    profile.age
    profile.location
    profile.match_percentage
    profile.enemy_percentage
    profile.id
    profile.rating
    profile.contacted
@pytest.mark.xfail(reason=SEARCH_FILTERS_BEING_REIMPLEMENTED)
@util.use_cassette
def test_search_fetchable_iter():
    """Iterating a filtered SearchFetchable keeps yielding Profile objects."""
    search_fetchable = SearchFetchable(gentation='everybody',
                                       religion='buddhist', age_min=25, age_max=25,
                                       location='new york, ny', keywords='bicycle')
    for count, profile in enumerate(search_fetchable):
        assert isinstance(profile, Profile)
        if count > 30:
            break
@pytest.mark.xfail(reason=SEARCH_FILTERS_BEING_REIMPLEMENTED)
@util.use_cassette
def test_easy_search_filters():
    """Each simple categorical filter value is reflected in the result profile."""
    session = Session.login()
    query_test_pairs = [# ('bodytype', maps.bodytype),
                        # TODO(@IvanMalison) this is an alist feature,
                        # so it can't be tested for now.
                        ('drugs', maps.drugs), ('smokes', maps.smokes),
                        ('diet', maps.diet,), ('job', maps.job)]

    for query_param, re_map in query_test_pairs:
        for value in sorted(re_map.pattern_to_value.keys()):
            profile = SearchFetchable(**{
                'gentation': '',
                'session': session,
                'count': 1,
                query_param: value
            })[0]
            attribute = getattr(profile.details, query_param)
            assert value in (attribute or '').lower()
@pytest.mark.xfail(reason=SEARCH_FILTERS_BEING_REIMPLEMENTED)
@util.use_cassette
def test_children_filter():
    """has_kids/wants_kids filters appear in the returned details.

    NOTE(review): the second fetch uses count=0 then indexes [0] -- looks
    inconsistent, but the test is xfail-marked anyway.
    """
    session = Session.login()
    profile = SearchFetchable(session, wants_kids="wants kids", count=1)[0]
    assert "wants" in profile.details.children.lower()

    profile = SearchFetchable(session, has_kids=["has kids"],
                              wants_kids="doesn't want kids",
                              count=0)[0]
    assert "has kids" in profile.details.children.lower()
    assert "doesn't want" in profile.details.children.lower()
@pytest.mark.xfail(reason=SEARCH_FILTERS_BEING_REIMPLEMENTED)
@util.use_cassette
def test_pets_queries():
    """cats/dogs filters appear in the returned profile's pets details."""
    session = Session.login()
    profile = SearchFetchable(session, cats=['dislikes cats', 'likes cats'],
                              count=1)[0]
    assert 'likes cats' in profile.details.pets.lower()

    profile = SearchFetchable(session, dogs='likes dogs', cats='has cats', count=1)[0]
    assert 'likes dogs' in profile.details.pets.lower()
    assert 'has cats' in profile.details.pets.lower()
@pytest.mark.xfail(reason=SEARCH_FILTERS_BEING_REIMPLEMENTED)
@util.use_cassette
def test_height_filter():
    """Height bounds hold for both imperial and metric query formats."""
    session = Session.login()
    profile = SearchFetchable(session, height_min='5\'6"', height_max='5\'6"',
                              gentation='girls who like guys', radius=25, count=1)[0]
    match = magicnumbers.imperial_re.search(profile.details.height)
    assert int(match.group(1)) == 5
    assert int(match.group(2)) == 6

    profile = SearchFetchable(session, height_min='2.00m', count=1)[0]
    match = magicnumbers.metric_re.search(profile.details.height)
    assert float(match.group(1)) >= 2.00

    profile = SearchFetchable(session, height_max='1.5m', count=1)[0]
    match = magicnumbers.metric_re.search(profile.details.height)
    assert float(match.group(1)) <= 1.5
@pytest.mark.xfail(reason=SEARCH_FILTERS_BEING_REIMPLEMENTED)
@util.use_cassette
def test_language_filter():
    """The requested language shows up among the profile's languages."""
    session = Session.login()
    profile = SearchFetchable(session, language='french', count=1)[0]
    assert 'french' in [language_info[0].lower()
                        for language_info in profile.details.languages]

    profile = SearchFetchable(session, language='Afrikaans', count=1)[0]
    assert 'afrikaans' in map(operator.itemgetter(0), profile.details.languages)
@pytest.mark.xfail
@util.use_cassette
def test_attractiveness_filter():
    """Attractiveness bounds are respected by the returned profile."""
    session = Session.login()
    profile = SearchFetchable(session, attractiveness_min=4000,
                              attractiveness_max=6000, count=1)[0]
    assert profile.attractiveness > 4000
    assert profile.attractiveness < 6000
@pytest.mark.xfail
@util.use_cassette
def test_question_filter():
    """Filtering by a question yields profiles whose answer matches the user's."""
    user = User()
    user_question = user.questions.somewhat_important[0]
    for profile in user.search(question=user_question)[:5]:
        question = profile.find_question(user_question.id)
        assert question.their_answer_matches
@pytest.mark.xfail
@util.use_cassette
def test_question_filter_with_custom_answers():
    """Restricting to unacceptable answers yields profiles that do NOT match."""
    user = User()
    user_question = user.questions.somewhat_important[1]
    unacceptable_answers = [answer_option.id
                            for answer_option in user_question.answer_options
                            if not answer_option.is_match]
    for profile in user.search(question=user_question.id,
                               question_answers=unacceptable_answers)[:5]:
        question = profile.find_question(user_question.id)
        assert not question.their_answer_matches
@pytest.mark.xfail
@util.use_cassette
def test_question_count_filter():
    """question_count_min profiles expose at least that many questions."""
    user = User()
    for profile in user.search(question_count_min=250)[:5]:
        # Index 249 existing implies >= 250 answered questions.
        assert profile.questions[249]
@pytest.mark.xfail(reason="ProfileBuilder needs to be improved to actually get data from profile results")
@util.use_cassette
def test_search_populates_upfront():
    """Search results should be pre-populated: reading `id` triggers no HTTP."""
    user = User()
    search_fetchable = user.search()
    for profile in search_fetchable[:4]:
        profile_session = profile._session
        with mock.patch.object(profile, '_session') as mock_session:
            mock_session.okc_get.side_effect = profile_session.okc_get
            assert profile.id > 0
            assert mock_session.okc_get.call_count == 0
            # Essays are lazy: reading one performs exactly one fetch.
            profile.essays.self_summary
            assert mock_session.okc_get.call_count == 1
|
from .base import DerivedType
from categorical import CategoricalComparator
from .categorical_type import CategoricalType
class ExistsType(CategoricalType):
    """Categorical comparison on mere presence/absence of a field value."""
    type = "Exists"
    _predicate_functions = []

    def __init__(self, definition):
        # NOTE(review): deliberately(?) invokes the grandparent-level
        # __init__ (super of CategoricalType), skipping
        # CategoricalType.__init__ -- confirm this matches dedupe's intent
        # before changing.
        super(CategoricalType, self).__init__(definition)

        # Compare over a two-valued domain: 0 = absent, 1 = present.
        self.cat_comparator = CategoricalComparator([0, 1])

        self.higher_vars = []
        for higher_var in self.cat_comparator.dummy_names:
            dummy_var = DerivedType({'name': higher_var,
                                     'type': 'Dummy',
                                     'has missing': self.has_missing})
            self.higher_vars.append(dummy_var)

    def comparator(self, field_1, field_2):
        # Presence is determined by truthiness, not None-ness: empty
        # strings/containers count as absent.
        if field_1 and field_2:
            return self.cat_comparator(1, 1)
        elif field_1 or field_2:
            return self.cat_comparator(0, 1)
        else:
            return self.cat_comparator(0, 0)

    # This flag tells fieldDistances in dedupe.core to pass
    # missing values (None) into the comparator
    comparator.missing = True
|
import sys, os
import pickle
import nltk
import paths
from utils import *
def words_to_dict(words):
    """Map each word to its position in *words* (word -> index)."""
    return {word: index for index, word in enumerate(words)}
# Point NLTK at the project-local data directory.
nltk.data.path.append(paths.nltk_data_path)

# Choose the normaliser: WordNet lemmatisation (default) or Porter stemming.
use_wordnet = True
if use_wordnet:
    stemmer = nltk.stem.wordnet.WordNetLemmatizer()
    stem = stemmer.lemmatize
else:
    stemmer = nltk.stem.porter.PorterStemmer()
    stem = stemmer.stem
def tokens(text):
    """Normalise dashes in *text*, then split it into preprocessed tokens."""
    # trying to capture multi-word keywords
    for src, tgt in (("---", " "), ("--", " "), ("-", "")):
        text = text.replace(src, tgt)
    return preprocess(text)
def make_bow(doc, d):
    """Build a bag-of-words {word_id: count} for the words of *doc* in *d*.

    Words absent from the vocabulary mapping *d* are silently dropped.
    """
    bow = {}
    for word in doc:
        wordid = d.get(word)
        if wordid is not None:
            bow[wordid] = bow.get(wordid, 0) + 1
    # XXX we should notify something about non-stopwords that we couldn't parse
    return bow
# Valid values for the command-line options below.
modes = ["fulltext","abstracts"]   # corpus flavour (-m/--mode)
ks = ["20","50","100","200"]       # number of LDA topics (-k)
dist = ["kl","euclidean"]          # similarity metric (-d/--distance)
if __name__ == '__main__':
    args = sys.argv[1:]
    # Defaults: full-text corpus, 20 topics, KL divergence, 20 results.
    mode = modes[0]
    k = ks[0]
    dfun = dist[0]
    num = 20
    # Hand-rolled option parsing; loops while more than the single
    # positional PDF argument remains. Unknown flags are skipped silently.
    while len(args) > 1:
        if args[0] == "-k":
            if args[1] in ks:
                k = args[1]
            args = args[2:]
        if args[0] in ["-m","--mode"]:
            if args[1] in modes:
                mode = args[1]
            args = args[2:]
        if args[0] in ["-n","--num"]:
            if int(args[1]) in range(1,50):
                num = int(args[1])
            args = args[2:]
        if args[0] in ["-d","--distance"]:
            if args[1] in dist:
                dfun = args[1]
            args = args[2:]
    # Paths into the pre-trained LDA model directory for the chosen corpus.
    model = os.path.join(mode,"lda" + k,"final")
    words = os.path.join(mode,"vocab.dat")
    docs = os.path.join(mode,"docs.dat")
    pdf_file = args[0]
    (base,_) = os.path.splitext(pdf_file)
    # NOTE(review): filename is interpolated into a shell command --
    # quotes/backticks in the name would break or inject (see XXX).
    text = os.popen("/usr/bin/pdftotext \"%s\" -" % pdf_file).read() # XXX safe filenames!
    vocab = words_to_dict(open(words).read().split())
    bow = make_bow(map(stem,tokens(text)),vocab)
    # Write the bag-of-words in lda-c format: "<N> id:count id:count ...".
    dat_file = base + ".dat"
    out = open(dat_file,"w")
    out.write(str(len(bow)))
    out.write(' ')
    for term in bow:
        out.write(str(term))
        out.write(':')
        out.write(str(bow[term]))
        out.write(' ')
    out.write('\n')
    out.close()
    # Run lda-c inference on the new document.
    log = base + ".log"
    os.system(paths.lda + " inf settings.txt %s %s %s >%s 2>&1" % (model,dat_file,base,log))
    # XXX capture output, handle errors
    inf = read(base + "-gamma.dat")
    gammas = read(model + ".gamma")
    # Pair each corpus document with its topic-proportion vector.
    papers = zip(read(docs), map(lambda s: map(float,s.split()), gammas))
    # tgt[1] is the inferred topic vector for the input PDF.
    tgt = ["INPUT PDF"] + map(lambda s: map(float,s.split()), inf)
    # XXX these are the topic values, if we want to visualize them
    # XXX be careful to not leak our filenames
    if dfun == "euclidean":
        metric = distance
        fmt = '%d'
    elif dfun == "kl":
        metric = kl_divergence
        fmt = '%f'
    else:
        # Fallback: treat anything unexpected as KL.
        metric = kl_divergence
        fmt = '%f'
    # Rank corpus documents by distance to the input document (Python 2
    # list-returning map/sort-with-cmp).
    papers = map(lambda s: (metric(s[1],tgt[1]),s), papers)
    papers.sort(lambda x,y: cmp(x[0],y[0]))
    print "\nRelated papers:\n"
    for (d,(doc,gs)) in papers[0:num]:
        print (' %s (' + fmt + ')') % (doc,d)
|
"""calibrated_image.py was written by Ryan Petersburg for use with fiber
characterization on the EXtreme PREcision Spectrograph
"""
import numpy as np
from .base_image import BaseImage
from .numpy_array_handler import filter_image, subframe_image
class CalibratedImage(BaseImage):
    """Fiber face image analysis class
    Class that contains calibration images and executes corrections based on
    those images
    Attributes
    ----------
    dark : str, array_like, or None
        The input used to set the dark image. See
        BaseImage.convert_image_to_array() for details
    ambient : str, array_like, or None
        The input used to set the ambient image. See
        BaseImage.convert_image_to_array() for details
    flat : str, array_like, or None
        The input used to set the flat image. See
        BaseImage.convert_image_to_array() for details
    kernel_size : int (odd)
        The kernel side length used when filtering the image. This value may
        need to be tweaked, especially with few co-added images, due to random
        noise. The filtered image is used for the centering algorithms, so for
        a "true test" use kernel_size=1, but be careful, because this may
        lead to needing a fairly high threshold for the noise.
    new_calibration : bool
        Whether or not self.calibration has been set with new images
    Args
    ----
    image_input : str, array_like, or None, optional
        See BaseImage class for details
    dark : str, array_like, or None, optional
        Image input to instantiate BaseImage for dark image
    ambient : str, array_like, or None, optional
        Image input to instantiate BaseImage for ambient image
    flat : str, array_like, or None, optional
        Image input to instantiate BaseImage for flat image
    kernel_size : int (odd), optional
        Set the kernel size for filtering
    **kwargs : keyword arguments
        Passed into the BaseImage superclass
    """
    def __init__(self, image_input, dark=None, ambient=None, flat=None,
                 kernel_size=9, **kwargs):
        self.dark = dark
        self.ambient = ambient
        self.flat = flat
        self.kernel_size = kernel_size
        # Flags that corrections must be (re)computed instead of loading a
        # previously saved image file; cleared by execute_error_corrections.
        self.new_calibration = True
        super(CalibratedImage, self).__init__(image_input, **kwargs)
    #=========================================================================#
    #==== Primary Image Getters ==============================================#
    #=========================================================================#
    def get_uncorrected_image(self):
        """Return the raw image without corrections or filtering.
        Returns
        -------
        uncorrected_image : 2D numpy array
            Raw image or average of images (depending on image_input)
        """
        return self.convert_image_to_array(self.image_input)
    def get_image(self):
        """Return the corrected image
        This method must be called to get access to the corrected 2D numpy
        array being analyzed. Attempts to access a previously saved image
        under self.image_file or otherwise applies corrections to the raw
        images pulled from their respective files
        Returns
        -------
        image : 2D numpy array
            Image corrected by calibration images
        """
        # Use the cached image file only when no calibration input has been
        # (re)set since the file was written.
        if self.image_file is not None and not self.new_calibration:
            return self.image_from_file(self.image_file)
        return self.execute_error_corrections(self.get_uncorrected_image())
    def get_uncorrected_filtered_image(self, kernel_size=None, **kwargs):
        """Return a median filtered image
        Args
        ----
        kernel_size : {None, int (odd)}, optional
            The side length of the kernel used to median filter the image. Uses
            self.kernel_size if None.
        Returns
        -------
        filtered_image : 2D numpy array
            The stored image median filtered with the given kernel_size
        """
        image = self.get_uncorrected_image()
        if image is None:
            return None
        if kernel_size is None:
            kernel_size = self.kernel_size
        return filter_image(image, kernel_size, **kwargs)
    def get_filtered_image(self, kernel_size=None, **kwargs):
        """Return an error corrected and median filtered image
        Args
        ----
        kernel_size : {None, int (odd)}, optional
            The side length of the kernel used to median filter the image. Uses
            self.kernel_size if None.
        Returns
        -------
        filtered_image : 2D numpy array
            The stored image median filtered with the given kernel_size and
            error corrected using the given method
        """
        image = self.get_image()
        if image is None:
            return None
        if kernel_size is None:
            kernel_size = self.kernel_size
        return filter_image(image, kernel_size, **kwargs)
    #=========================================================================#
    #==== Calibration Image Getters ==========================================#
    #=========================================================================#
    def get_dark_image(self):
        """Returns the dark calibration image.
        Returns
        -------
        dark_image : 2D numpy array
            The dark image (None when no dark input is available)
        """
        return BaseImage(self.dark).get_image()
    def get_ambient_image(self):
        """Returns the ambient calibration image.
        The ambient image is itself dark-corrected before being returned.
        Returns
        -------
        ambient_image : 2D numpy array
            The dark-corrected ambient image (None when no ambient input is
            available)
        """
        return CalibratedImage(self.ambient, dark=self.dark).get_image()
    def get_flat_image(self):
        """Returns the flat field calibration image.
        The flat image is itself dark-corrected before being returned.
        Returns
        -------
        flat_image : 2D numpy array
            The dark-corrected flat field image (None when no flat input is
            available)
        """
        return CalibratedImage(self.flat, dark=self.dark).get_image()
    def set_dark(self, dark):
        """Sets the dark calibration image."""
        self.dark = dark
        self.new_calibration = True
    def set_ambient(self, ambient):
        """Sets the ambient calibration image."""
        self.ambient = ambient
        self.new_calibration = True
    def set_flat(self, flat):
        """Sets the flat calibration images."""
        self.flat = flat
        self.new_calibration = True
    #=========================================================================#
    #==== Image Calibration Algorithm ========================================#
    #=========================================================================#
    def execute_error_corrections(self, image):
        """Applies corrective images to image
        Applies dark image to the flat field and ambient images. Then applies
        flat field and ambient image correction to the primary image
        Args
        ----
        image : 2D numpy array
            Image to be corrected
        Returns
        -------
        corrected_image : 2D numpy array
            Corrected image (None when image is None)
        """
        if image is None:
            return None
        corrected_image = image
        dark_image = self.get_dark_image()
        # Crop the calibration frame to this image's subframe when the
        # shapes disagree.
        if dark_image is not None and dark_image.shape != corrected_image.shape:
            dark_image = subframe_image(dark_image, self.subframe_x,
                                        self.subframe_y, self.width,
                                        self.height)
        corrected_image = self.remove_dark_image(corrected_image,
                                                 dark_image)
        ambient_image = self.get_ambient_image()
        if ambient_image is not None:
            if ambient_image.shape != corrected_image.shape:
                ambient_image = subframe_image(ambient_image, self.subframe_x,
                                               self.subframe_y, self.width,
                                               self.height)
            ambient_exp_time = BaseImage(self.ambient).exp_time
            # Scale the ambient frame to this image's exposure time before
            # subtracting it, when both exposure times are known and differ.
            if self.exp_time is not None and ambient_exp_time != self.exp_time:
                corrected_image = self.remove_dark_image(corrected_image,
                                                         ambient_image
                                                         * self.exp_time
                                                         / ambient_exp_time)
            else:
                corrected_image = self.remove_dark_image(corrected_image,
                                                         ambient_image)
        flat_image = self.get_flat_image()
        if flat_image is not None:
            if flat_image.shape != corrected_image.shape:
                flat_image = subframe_image(flat_image, self.subframe_x,
                                            self.subframe_y, self.width,
                                            self.height)
            # Flat-field normalization (unity mean gain).
            # NOTE(review): element-wise division assumes the flat contains
            # no zero-valued pixels -- confirm upstream.
            corrected_image *= flat_image.mean() / flat_image
        self.new_calibration = False
        return corrected_image
    def remove_dark_image(self, image, dark_image=None):
        """Uses dark image to correct image
        Args
        ----
        image : 2D numpy array
            numpy array of the image
        dark_image : 2D numpy array, optional
            dark image to be removed; defaults to self.get_dark_image()
        Returns
        -------
        output_array : 2D numpy array
            corrected image
        """
        if dark_image is None:
            dark_image = self.get_dark_image()
        if dark_image is None:
            # No dark frame available: subtract nothing.
            dark_image = np.zeros_like(image)
        output_image = image - dark_image
        # Renormalize to the approximate smallest value (avoiding hot pixels)
        output_image -= filter_image(output_image, 5).min()
        # Prevent any dark/ambient image hot pixels from leaking through:
        # pixels still below -1000 after renormalization are zeroed.
        output_image *= (output_image > -1000.0).astype('uint8')
        return output_image
    #=========================================================================#
    #==== Attribute Setters ==================================================#
    #=========================================================================#
    def set_attributes_from_object(self, object_file):
        """Load attributes from a saved object file and re-anchor the
        calibration image paths relative to the new file location."""
        super(CalibratedImage, self).set_attributes_from_object(object_file)
        self.dark = self.change_path(self.dark)
        self.ambient = self.change_path(self.ambient)
        self.flat = self.change_path(self.flat)
|
from collections.abc import Iterable
from django import template
from django.db.models import Model
register = template.Library()
@register.filter
def get_type(value):
    """Template filter: return the Python type object of *value*."""
    # inspired by: https://stackoverflow.com/a/12028864
    return type(value)
@register.filter
def is_model(value):
    """Template filter: True when *value* is a Django Model instance."""
    return isinstance(value, Model)
@register.filter
def is_iterable(value):
    """Template filter: True when *value* is iterable (includes strings)."""
    return isinstance(value, Iterable)
@register.filter
def is_str(value):
    """Template filter: True when *value* is a string."""
    return isinstance(value, str)
@register.filter
def is_bool(value):
    """Template filter: True when *value* is a boolean."""
    return isinstance(value, bool)
|
from __future__ import unicode_literals
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
    # Needs the wagtaildocs Document model plus the previous articles state.
    dependencies = [
        ('wagtaildocs', '0003_add_verbose_names'),
        ('articles', '0075_auto_20151015_2022'),
    ]
    operations = [
        migrations.AddField(
            model_name='articlepage',
            name='video_document',
            # Nullable so existing rows need no value; SET_NULL keeps the
            # article alive when the referenced document is deleted; '+'
            # disables the reverse accessor on Document.
            field=models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='wagtaildocs.Document', null=True),
        ),
    ]
|
from keras.models import Sequential
from keras.layers import convolutional
from keras.layers.core import Dense, Flatten
from SGD_exponential_decay import SGD_exponential_decay as SGD
K = 152 # depth of convolutional layers
LEARNING_RATE = .003 # initial learning rate
DECAY = 8.664339379294006e-08 # rate of exponential learning_rate decay
class value_trainer:
    """Builds a convolutional value network (legacy Keras 1.x API).
    The network maps a 49-channel 19x19 input to a single tanh output in
    [-1, 1]: one 5x5 conv layer, eleven 3x3 conv layers, a 1x1 linear conv,
    then Flatten -> Dense(256) -> Dense(1, tanh), trained with MSE loss.
    """
    def __init__(self):
        self.model = Sequential()
        self.model.add(convolutional.Convolution2D(input_shape=(49, 19, 19), nb_filter=K, nb_row=5, nb_col=5,
                       init='uniform', activation='relu', border_mode='same'))
        # Layers 2..12: eleven identical 3x3 convolutions (loop index unused).
        for i in range(2,13):
            self.model.add(convolutional.Convolution2D(nb_filter=K, nb_row=3, nb_col=3,
                           init='uniform', activation='relu', border_mode='same'))
        # 1x1 linear convolution collapses the K feature maps to one plane.
        self.model.add(convolutional.Convolution2D(nb_filter=1, nb_row=1, nb_col=1,
                       init='uniform', activation='linear', border_mode='same'))
        self.model.add(Flatten())
        self.model.add(Dense(256,init='uniform'))
        self.model.add(Dense(1,init='uniform',activation="tanh"))
        # SGD with exponential learning-rate decay (module-level constants).
        sgd = SGD(lr=LEARNING_RATE, decay=DECAY)
        self.model.compile(loss='mean_squared_error', optimizer=sgd)
    def get_samples(self):
        # TODO non-terminating loop that draws training samples uniformly at random
        pass
    def train(self):
        # TODO use self.model.fit_generator to train from data source
        pass
if __name__ == '__main__':
    # Build the network immediately; training entry points are still TODO.
    trainer = value_trainer()
    # TODO command line instantiation
|
import random
import unittest
from lib import unigraph
class UnigraphExtra(unigraph.Unigraph):
    """Unigraph subclass that adds an edge-membership query."""

    def has_edge(self, left_vertex, right_vertex):
        """Return True when the two vertices are adjacent.

        Every vertex is considered adjacent to itself (implicit self-loop).
        """
        return (left_vertex == right_vertex
                or right_vertex in self._vertices[left_vertex])
class UnigraphEdgeTestCase(unittest.TestCase):
    """Exercises UnigraphExtra.has_edge on a randomly generated graph."""

    def setUp(self):
        """Build a graph with 10-14 vertices and 2*V random edges."""
        self.graph = UnigraphExtra(random.randrange(10, 15))
        for edge in range(2 * self.graph.vertices()):
            f, t = (random.randrange(self.graph.vertices()) for x in range(2))
            self.graph.add_edge(f, t)

    def test_edge(self):
        """Adjacent vertices report an edge; non-adjacent ones do not."""
        for vertex in range(self.graph.vertices()):
            existing_vertices = set(self.graph._vertices[vertex])
            all_vertices = set(range(self.graph.vertices()))
            # BUG FIX: the original computed all_vertices - all_vertices,
            # which is always empty, so the negative case was never checked.
            # The vertex itself is also excluded because has_edge treats
            # self-loops as always present.
            missing_vertices = all_vertices - existing_vertices - {vertex}
            for adj_vertex in existing_vertices:
                self.assertTrue(self.graph.has_edge(vertex, adj_vertex))
            for adj_vertex in missing_vertices:
                self.assertFalse(self.graph.has_edge(vertex, adj_vertex))

    def test_self_loop(self):
        """Every vertex has an implicit edge to itself."""
        for vertex in range(self.graph.vertices()):
            self.assertTrue(self.graph.has_edge(vertex, vertex))
if "__main__" == __name__:
    # Run the test suite when executed directly.
    unittest.main()
|
from django import template
from django.utils.safestring import mark_safe
from django.utils.html import escape
from django.utils.translation import ugettext as _
from django.contrib.admin.views.main import PAGE_VAR, ALL_VAR
from django.conf import settings
from django.contrib.sites.models import Site
from BeautifulSoup import BeautifulSoup
register = template.Library()
@register.simple_tag
def atb_site_link():
    """Render the navbar "home" icon linking to the configured site URL.

    Returns an empty string when ADMINTOOLS_BOOTSTRAP_SITE_LINK is falsy.
    """
    if settings.ADMINTOOLS_BOOTSTRAP_SITE_LINK:
        return '''
          <li><a href="%s" class="top-icon" title="%s" rel="popover" data-placement="below"><i
            class="icon-home icon-white"></i></a></li>
          <li class="divider-vertical"></li>
        ''' % (settings.ADMINTOOLS_BOOTSTRAP_SITE_LINK, _('Open site'))
    else:
        return ''
@register.simple_tag
def atb_site_name():
    """Return the current Site's name, or a generic label when the sites
    framework is not installed."""
    sites_installed = 'django.contrib.sites' in settings.INSTALLED_APPS
    if not sites_installed:
        return _('Django site')
    return Site.objects.get_current().name
@register.simple_tag
def bootstrap_page_url(cl, page_num):
    """Build the escaped changelist URL for *page_num*.

    Used for the prev/next links; the template numbers pages from 1 while
    Django's PAGE_VAR is 0-based, hence the -1 offset.
    """
    query_string = cl.get_query_string({PAGE_VAR: page_num - 1})
    return escape(query_string)
DOT = '.'
def bootstrap_paginator_number(cl,i, li_class=None):
    """
    Generates an individual page index link in a paginated list.
    Args:
        cl: the admin ChangeList being rendered.
        i: 0-based page index, or DOT for an ellipsis placeholder.
        li_class: unused; kept for interface compatibility.
    """
    if i == DOT:
        # BUG FIX: wrap in mark_safe like the other branches; otherwise the
        # markup is HTML-escaped when the tag output is autoescaped.
        return mark_safe(u'<li><a href="#">...</a></li>')
    elif i == cl.page_num:
        # Current page: highlighted, not a navigable link.
        return mark_safe(u'<li class="active"><a href="#">%d</a></li> ' % (i+1))
    else:
        return mark_safe(u'<li><a href="%s">%d</a></li>' % (escape(cl.get_query_string({PAGE_VAR: i})), i+1))
paginator_number = register.simple_tag(bootstrap_paginator_number)
def bootstrap_pagination(cl):
    """
    Generates the series of links to the pages in a paginated list.
    Returns the context dict for the admin/pagination.html inclusion tag:
    page_range contains 0-based page indices with DOT entries marking the
    elided ranges of a "smart" paginator.
    """
    paginator, page_num = cl.paginator, cl.page_num
    pagination_required = (not cl.show_all or not cl.can_show_all) and cl.multi_page
    if not pagination_required:
        page_range = []
    else:
        ON_EACH_SIDE = 3
        ON_ENDS = 2
        # If there are 10 or fewer pages, display links to every page.
        # Otherwise, do some fancy
        if paginator.num_pages <= 10:
            page_range = range(paginator.num_pages)
        else:
            # Insert "smart" pagination links, so that there are always ON_ENDS
            # links at either end of the list of pages, and there are always
            # ON_EACH_SIDE links at either end of the "current page" link.
            page_range = []
            if page_num > (ON_EACH_SIDE + ON_ENDS):
                # Leading pages, an ellipsis, then the window before page_num.
                page_range.extend(range(0, ON_EACH_SIDE - 1))
                page_range.append(DOT)
                page_range.extend(range(page_num - ON_EACH_SIDE, page_num + 1))
            else:
                page_range.extend(range(0, page_num + 1))
            if page_num < (paginator.num_pages - ON_EACH_SIDE - ON_ENDS - 1):
                # Window after page_num, an ellipsis, then the trailing pages.
                page_range.extend(range(page_num + 1, page_num + ON_EACH_SIDE + 1))
                page_range.append(DOT)
                page_range.extend(range(paginator.num_pages - ON_ENDS, paginator.num_pages))
            else:
                page_range.extend(range(page_num + 1, paginator.num_pages))
    need_show_all_link = cl.can_show_all and not cl.show_all and cl.multi_page
    return {
        'cl': cl,
        'pagination_required': pagination_required,
        # False when no link is needed; otherwise the "show all" URL.
        'show_all_url': need_show_all_link and cl.get_query_string({ALL_VAR: ''}),
        'page_range': page_range,
        'ALL_VAR': ALL_VAR,
        '1': 1,
        # Django's Paginator pages are 1-based; cl.page_num is 0-based.
        'curr_page': cl.paginator.page(cl.page_num+1),
    }
bootstrap_pagination = register.inclusion_tag('admin/pagination.html')(bootstrap_pagination)
class BreadcrumbsNode(template.Node):
    """
    renders bootstrap breadcrumbs list.
    usage::
        {% breadcrumbs %}
        url1|text1
        url2|text2
        text3
        {% endbreadcrumbs %}
    | is delimiter by default, you can use {% breadcrumbs delimiter_char %} to change it.
    lines without delimiters are interpreted as active breadcrumbs
    """
    def __init__(self, nodelist, delimiter):
        # nodelist: the template nodes between the breadcrumbs tags.
        self.nodelist = nodelist
        # delimiter: separates URL from label on each line.
        self.delimiter = delimiter
    def render(self, context):
        """Render the inner nodes, then convert each line into a Bootstrap
        breadcrumb <li>. Django-style breadcrumb markup is detected and
        re-parsed with BeautifulSoup instead."""
        data = self.nodelist.render(context).strip()
        if not data:
            return ''
        try:
            data.index('<div class="breadcrumbs">')
        except ValueError:
            # Plain text input: one breadcrumb per non-empty line, split on
            # the delimiter into [url, label] or [label].
            lines = [ l.strip().split(self.delimiter) for l in data.split("\n") if l.strip() ]
        else:
            # data is django-style breadcrumbs, parsing
            try:
                soup = BeautifulSoup(data)
                lines = [ (a.get('href'), a.text) for a in soup.findAll('a')]
                lines.append([soup.find('div').text.split('›')[-1].strip()])
            except Exception, e:
                lines = [["Cannot parse breadcrumbs: %s" % unicode(e)]]
        out = '<ul class="breadcrumb">'
        curr = 0
        for d in lines:
            # A leading '*' marks the crumb as active.
            # NOTE(review): d[0][0] raises IndexError on an empty first
            # element (e.g. a line starting with the delimiter) -- confirm
            # inputs never do this.
            if d[0][0] == '*':
                active = ' class="active"'
                d[0] = d[0][1:]
            else:
                active = ''
            curr += 1
            if (len(lines) == curr):
                # last
                divider = ''
            else:
                divider = '<span class="divider">/</span>'
            if len(d) == 2:
                out += '<li%s><a href="%s">%s</a>%s</li>' % (active, d[0], d[1], divider)
            elif len(d) == 1:
                out += '<li%s>%s%s</li>' % (active, d[0], divider)
            else:
                raise ValueError('Invalid breadcrumb line: %s' % self.delimiter.join(d))
        out += '</ul>'
        return out
@register.tag(name='breadcrumbs')
def do_breadcrumbs(parser, token):
    """Compile {% breadcrumbs [delimiter] %} ... {% endbreadcrumbs %} into a
    BreadcrumbsNode."""
    try:
        tag_name, delimiter = token.contents.split(None, 1)
    except ValueError:
        # No delimiter argument supplied; fall back to the default '|'.
        delimiter = '|'
    nodelist = parser.parse(('endbreadcrumbs',))
    parser.delete_first_token()
    return BreadcrumbsNode(nodelist, delimiter)
|
"""
Written by Daniel M. Aukes and CONTRIBUTORS
Email: danaukes<at>asu.edu.
Please see LICENSE for full license.
"""
import sys
import popupcad
import qt.QtCore as qc
import qt.QtGui as qg
if __name__=='__main__':
    # BUG FIX: QApplication expects the full argv list, not a single string;
    # passing sys.argv[0] raises a TypeError under PyQt/PySide.
    app = qg.QApplication(sys.argv)
    filename_from = 'C:/Users/danaukes/Dropbox/zhis sentinal 11 files/modified/sentinal 11 manufacturing_R08.cad'
    filename_to = 'C:/Users/danaukes/Dropbox/zhis sentinal 11 files/modified/sentinal 11 manufacturing_R09.cad'
    d = popupcad.filetypes.design.Design.load_yaml(filename_from)
    # Build a two-list picker dialog: the subdesign to replace (left list)
    # and its replacement (right list).
    widget = qg.QDialog()
    layout = qg.QVBoxLayout()
    layout1 = qg.QHBoxLayout()
    layout2 = qg.QHBoxLayout()
    list1 = qg.QListWidget()
    list2 = qg.QListWidget()
    button_ok = qg.QPushButton('Ok')
    button_cancel = qg.QPushButton('Cancel')
    subdesign_list = list(d.subdesigns.values())
    for item in subdesign_list:
        list1.addItem(str(item))
        list2.addItem(str(item))
    layout1.addWidget(list1)
    layout1.addWidget(list2)
    layout2.addWidget(button_ok)
    layout2.addWidget(button_cancel)
    layout.addLayout(layout1)
    layout.addLayout(layout2)
    widget.setLayout(layout)
    button_ok.pressed.connect(widget.accept)
    button_cancel.pressed.connect(widget.reject)
    if widget.exec_():
        # Proceed only when exactly one item is selected in each list.
        if len(list1.selectedIndexes())==1 and len(list2.selectedIndexes())==1:
            ii_from = list1.selectedIndexes()[0].row()
            ii_to = list2.selectedIndexes()[0].row()
            print(ii_from,ii_to)
            # Rewire every reference, drop the replaced subdesign, and save.
            d.replace_subdesign_refs(subdesign_list[ii_from].id,subdesign_list[ii_to].id)
            d.subdesigns.pop(subdesign_list[ii_from].id)
            d.save_yaml(filename_to)
    sys.exit(app.exec_())
|
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    dependencies = [
        ('main', '0006_auto_20160616_1640'),
    ]
    operations = [
        migrations.AlterField(
            model_name='episode',
            name='edit_key',
            # NOTE(review): the literal default ('41086227') was frozen into
            # this migration by makemigrations; it applies only while
            # migrating, not at runtime -- confirm against the model.
            field=models.CharField(blank=True, default='41086227', help_text='key to allow unauthenticated users to edit this item.', max_length=32, null=True),
        ),
    ]
|
import os
import sys
import sublime
import sublime_plugin
# Detect the Sublime Text major version ('' seen on some dev builds).
st_version = 2
if sublime.version() == '' or int(sublime.version()) > 3000:
    st_version = 3
# Force a reload of the plugin's reloader module if it is already imported
# (ST3 namespaces packages under 'CodeFormatter.').
reloader_name = 'codeformatter.reloader'
if st_version == 3:
    reloader_name = 'CodeFormatter.' + reloader_name
    from imp import reload
if reloader_name in sys.modules:
    reload(sys.modules[reloader_name])
try:
    # Python 3
    from .codeformatter.formatter import Formatter
except (ValueError):
    # Python 2
    from codeformatter.formatter import Formatter
# Grab the real builtin print (works on both Python 2 and 3 plugin hosts).
cprint = globals()['__builtins__']['print']
debug_mode = False
def plugin_loaded():
    """Sublime Text plugin entry point: announce initialization and make the
    bundled PHP beautifier phar executable on non-Windows platforms."""
    cprint('CodeFormatter: Plugin Initialized')
    # settings = sublime.load_settings('CodeFormatter.sublime-settings')
    # debug_mode = settings.get('codeformatter_debug', False)
    # if debug_mode:
    #     from pprint import pprint
    #     pprint(settings)
    #     debug_write('Debug mode enabled')
    #     debug_write('Platform ' + sublime.platform() + ' ' + sublime.arch())
    #     debug_write('Sublime Version ' + sublime.version())
    #     debug_write('Settings ' + pprint(settings))
    if (sublime.platform() != 'windows'):
        import stat
        path = (
            sublime.packages_path() +
            '/CodeFormatter/codeformatter/lib/phpbeautifier/fmt.phar'
        )
        # Add the owner-execute bit so the phar can be run directly.
        st = os.stat(path)
        os.chmod(path, st.st_mode | stat.S_IEXEC)
# Sublime Text 2 never calls plugin_loaded(); invoke it at import time.
if st_version == 2:
    plugin_loaded()
class CodeFormatterCommand(sublime_plugin.TextCommand):
    """Formats the current view (bound to the 'code_formatter' command)."""
    def run(self, edit, syntax=None, saving=None):
        run_formatter(self.view, edit, syntax=syntax, saving=saving)
class CodeFormatterOpenTabsCommand(sublime_plugin.TextCommand):
    """Formats every open view in the active window, suppressing popups."""
    def run(self, edit, syntax=None):
        window = sublime.active_window()
        for view in window.views():
            # NOTE(review): the syntax argument is accepted but not
            # forwarded to run_formatter -- confirm whether intentional.
            run_formatter(view, edit, quiet=True)
class CodeFormatterEventListener(sublime_plugin.EventListener):
    """Triggers formatting right before a buffer is saved."""
    def on_pre_save(self, view):
        view.run_command('code_formatter', {'saving': True})
class CodeFormatterShowPhpTransformationsCommand(sublime_plugin.TextCommand):
    """Lists the PHP transformations supported by the bundled beautifier
    in an output panel."""
    def run(self, edit, syntax=False):
        import subprocess
        import re
        platform = sublime.platform()
        settings = sublime.load_settings('CodeFormatter.sublime-settings')
        opts = settings.get('codeformatter_php_options')
        # Resolve the PHP binary and compatibility mode from user options.
        php_path = 'php'
        if ('php_path' in opts and opts['php_path']):
            php_path = opts['php_path']
        php55_compat = False
        if ('php55_compat' in opts and opts['php55_compat']):
            php55_compat = opts['php55_compat']
        # Build: <php> <phar> --list  (fmt.phar for PHP 5.5 compatibility).
        cmd = []
        cmd.append(str(php_path))
        if php55_compat:
            cmd.append(
                '{}/CodeFormatter/codeformatter/lib/phpbeautifier/fmt.phar'.format(
                    sublime.packages_path()))
        else:
            cmd.append(
                '{}/CodeFormatter/codeformatter/lib/phpbeautifier/phpf.phar'.format(
                    sublime.packages_path()))
        cmd.append('--list')
        stderr = ''
        stdout = ''
        try:
            if (platform == 'windows'):
                # Suppress the console window that would otherwise flash up.
                startupinfo = subprocess.STARTUPINFO()
                startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
                startupinfo.wShowWindow = subprocess.SW_HIDE
                p = subprocess.Popen(
                    cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                    stderr=subprocess.PIPE, startupinfo=startupinfo,
                    shell=False, creationflags=subprocess.SW_HIDE)
            else:
                p = subprocess.Popen(
                    cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                    stderr=subprocess.PIPE)
            stdout, stderr = p.communicate()
        except Exception as e:
            stderr = str(e)
        if (not stderr and not stdout):
            # BUG FIX: typo 'gethering' -> 'gathering' in user-facing message.
            stderr = 'Error while gathering list of php transformations'
        if len(stderr) == 0 and len(stdout) > 0:
            text = stdout.decode('utf-8')
            # Replace the phar's usage banner with a friendlier heading.
            # BUG FIX: typo 'Tranformations' -> 'Transformations' (user-facing).
            text = re.sub(
                'Usage:.*?PASSNAME', 'Available PHP Transformations:', text)
            window = self.view.window()
            pt = window.get_output_panel('paneltranformations')
            pt.set_read_only(False)
            pt.insert(edit, pt.size(), text)
            window.run_command(
                'show_panel', {'panel': 'output.paneltranformations'})
        else:
            show_error('Formatter error:\n' + stderr)
def run_formatter(view, edit, *args, **kwargs):
    """Format the contents of *view* in place with the matching Formatter.
    Keyword Args:
        syntax: optional syntax override used for formatter lookup.
        saving: True when invoked from the pre-save hook.
        quiet: suppress error popups (used when bulk-formatting tabs).
    """
    if view.is_scratch():
        show_error('File is scratch')
        return
    # default parameters
    syntax = kwargs.get('syntax')
    saving = kwargs.get('saving', False)
    quiet = kwargs.get('quiet', False)
    formatter = Formatter(view, syntax)
    if not formatter.exists():
        if not quiet and not saving:
            show_error('Formatter for this file type ({}) not found.'.format(
                formatter.syntax))
        return
    # On save, only proceed when the user enabled format-on-save.
    if (saving and not formatter.format_on_save_enabled()):
        return
    file_text = sublime.Region(0, view.size())
    file_text_utf = view.substr(file_text).encode('utf-8')
    if (len(file_text_utf) == 0):
        return
    # NOTE(review): stdout is passed straight to view.replace(); assumes
    # Formatter.format returns text, not bytes -- confirm.
    stdout, stderr = formatter.format(file_text_utf)
    if len(stderr) == 0 and len(stdout) > 0:
        view.replace(edit, file_text, stdout)
    elif not quiet:
        show_error('Format error:\n' + stderr)
def console_write(text, prefix=False):
    """Write *text* plus a newline to stdout.
    Args:
        text: the message to print.
        prefix: when True, prepend 'CodeFormatter: ' to the message.
    """
    if prefix:
        sys.stdout.write('CodeFormatter: ')
    sys.stdout.write(text + '\n')
def debug_write(text, prefix=True):
    """Write a debug message via console_write.
    BUG FIX: the original accepted ``prefix`` but ignored it and always
    passed True. The parameter is now honored; its default is True so the
    previous no-argument behavior (prefixed output) is preserved.
    """
    console_write(text, prefix)
def show_error(text):
    """Display *text* in a Sublime Text error dialog under a plugin banner."""
    sublime.error_message(u'CodeFormatter\n\n%s' % text)
|
"""
"""
import json
import time
import urllib
import urllib2
from wechatUtil import MessageUtil
from wechatReply import TextReply
class RobotService(object):
    """Auto reply robot service"""
    # NOTE(review): API key is hard-coded in source; consider moving it to
    # configuration / environment.
    KEY = 'd92d20bc1d8bb3cff585bf746603b2a9'
    url = 'http://www.tuling123.com/openapi/api'
    @staticmethod
    def auto_reply(req_info):
        """POST the user's text to the Tuling chatbot API and return its
        'text' reply with '<br>' converted to newlines.
        Args:
            req_info: unicode message text from the user.
        """
        query = {'key': RobotService.KEY, 'info': req_info.encode('utf-8')}
        # NOTE(review): headers is built but never attached to the request.
        headers = {'Content-type': 'text/html', 'charset': 'utf-8'}
        data = urllib.urlencode(query)
        req = urllib2.Request(RobotService.url, data)
        f = urllib2.urlopen(req).read()
        return json.loads(f).get('text').replace('<br>', '\n')
        #return json.loads(f).get('text')
class WechatService(object):
    """process request"""
    @staticmethod
    def processRequest(request):
        """process different message types.
        :param request: post request message (raw XML)
        :return: serialized XML text reply
        """
        requestMap = MessageUtil.parseXml(request)
        fromUserName = requestMap.get(u'FromUserName')
        toUserName = requestMap.get(u'ToUserName')
        createTime = requestMap.get(u'CreateTime')
        msgType = requestMap.get(u'MsgType')
        msgId = requestMap.get(u'MsgId')
        # The reply swaps sender and receiver of the incoming message.
        textReply = TextReply()
        textReply.setToUserName(fromUserName)
        textReply.setFromUserName(toUserName)
        textReply.setCreateTime(time.time())
        textReply.setMsgType(MessageUtil.RESP_MESSAGE_TYPE_TEXT)
        # BUG FIX: respContent was left unassigned on several event branches
        # (unsubscribe/scan/location/click) and for unknown message types,
        # raising UnboundLocalError at setContent(). Default to empty text.
        respContent = u''
        if msgType == MessageUtil.REQ_MESSAGE_TYPE_TEXT:
            content = requestMap.get('Content').decode('utf-8')  # note: decode first
            #respContent = u'您发送的是文本消息:' + content
            respContent = RobotService.auto_reply(content)
        elif msgType == MessageUtil.REQ_MESSAGE_TYPE_IMAGE:
            respContent = u'您发送的是图片消息!'
        elif msgType == MessageUtil.REQ_MESSAGE_TYPE_VOICE:
            respContent = u'您发送的是语音消息!'
        elif msgType == MessageUtil.REQ_MESSAGE_TYPE_VIDEO:
            respContent = u'您发送的是视频消息!'
        elif msgType == MessageUtil.REQ_MESSAGE_TYPE_LOCATION:
            respContent = u'您发送的是地理位置消息!'
        elif msgType == MessageUtil.REQ_MESSAGE_TYPE_LINK:
            respContent = u'您发送的是链接消息!'
        elif msgType == MessageUtil.REQ_MESSAGE_TYPE_EVENT:
            eventType = requestMap.get(u'Event')
            if eventType == MessageUtil.EVENT_TYPE_SUBSCRIBE:
                respContent = u'^_^谢谢您的关注,本公众号由王宁宁开发(python2.7+django1.4),如果你有兴趣继续开发,' \
                              u'可以联系我,就当打发时间了.'
            elif eventType == MessageUtil.EVENT_TYPE_UNSUBSCRIBE:
                pass
            elif eventType == MessageUtil.EVENT_TYPE_SCAN:
                # TODO
                pass
            elif eventType == MessageUtil.EVENT_TYPE_LOCATION:
                # TODO
                pass
            elif eventType == MessageUtil.EVENT_TYPE_CLICK:
                # TODO
                pass
        textReply.setContent(respContent)
        respXml = MessageUtil.class2xml(textReply)
        return respXml
    """
    if msgType == 'text':
        content = requestMap.get('Content')
        # TODO
    elif msgType == 'image':
        picUrl = requestMap.get('PicUrl')
        # TODO
    elif msgType == 'voice':
        mediaId = requestMap.get('MediaId')
        format = requestMap.get('Format')
        # TODO
    elif msgType == 'video':
        mediaId = requestMap.get('MediaId')
        thumbMediaId = requestMap.get('ThumbMediaId')
        # TODO
    elif msgType == 'location':
        lat = requestMap.get('Location_X')
        lng = requestMap.get('Location_Y')
        label = requestMap.get('Label')
        scale = requestMap.get('Scale')
        # TODO
    elif msgType == 'link':
        title = requestMap.get('Title')
        description = requestMap.get('Description')
        url = requestMap.get('Url')
    """
|
import os
import sys
if __name__ == "__main__":
    # Standard Django manage.py entry point: point at the project settings,
    # then delegate to Django's command-line handler.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "Sharing.settings")
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
|
"""Simple, schema-based database abstraction layer for the datastore.
Modeled after Django's abstraction layer on top of SQL databases,
http://www.djangoproject.com/documentation/mode_api/. Ours is a little simpler
and a lot less code because the datastore is so much simpler than SQL
databases.
The programming model is to declare Python subclasses of the Model class,
declaring datastore properties as class members of that class. So if you want to
publish a story with title, body, and created date, you would do it like this:
class Story(db.Model):
title = db.StringProperty()
body = db.TextProperty()
created = db.DateTimeProperty(auto_now_add=True)
You can create a new Story in the datastore with this usage pattern:
story = Story(title='My title')
story.body = 'My body'
story.put()
You query for Story entities using built in query interfaces that map directly
to the syntax and semantics of the datastore:
stories = Story.all().filter('date >=', yesterday).order('-date')
for story in stories:
print story.title
The Property declarations enforce types by performing validation on assignment.
For example, the DateTimeProperty enforces that you assign valid datetime
objects, and if you supply the "required" option for a property, you will not
be able to assign None to that property.
We also support references between models, so if a story has comments, you
would represent it like this:
class Comment(db.Model):
story = db.ReferenceProperty(Story)
body = db.TextProperty()
When you get a story out of the datastore, the story reference is resolved
automatically the first time it is referenced, which makes it easy to use
model instances without performing additional queries by hand:
comment = Comment.get(key)
print comment.story.title
Likewise, you can access the set of comments that refer to each story through
this property through a reverse reference called comment_set, which is a Query
preconfigured to return all matching comments:
story = Story.get(key)
for comment in story.comment_set:
print comment.body
"""
import base64
import copy
import datetime
import logging
import re
import time
import urlparse
import warnings
from google.appengine.api import datastore
from google.appengine.api import datastore_errors
from google.appengine.api import datastore_types
from google.appengine.api import users
from google.appengine.datastore import datastore_pb
# Re-export the datastore error types at module level so users of this
# module do not need to import datastore_errors directly.
Error = datastore_errors.Error
BadValueError = datastore_errors.BadValueError
BadPropertyError = datastore_errors.BadPropertyError
BadRequestError = datastore_errors.BadRequestError
EntityNotFoundError = datastore_errors.EntityNotFoundError
BadArgumentError = datastore_errors.BadArgumentError
QueryNotFoundError = datastore_errors.QueryNotFoundError
TransactionNotFoundError = datastore_errors.TransactionNotFoundError
Rollback = datastore_errors.Rollback
TransactionFailedError = datastore_errors.TransactionFailedError
BadFilterError = datastore_errors.BadFilterError
BadQueryError = datastore_errors.BadQueryError
BadKeyError = datastore_errors.BadKeyError
InternalError = datastore_errors.InternalError
NeedIndexError = datastore_errors.NeedIndexError
Timeout = datastore_errors.Timeout
CommittedButStillApplying = datastore_errors.CommittedButStillApplying
# Alias kept for property-validation failures.
ValidationError = BadValueError
# Re-export the datastore value types usable as property values.
Key = datastore_types.Key
Category = datastore_types.Category
Link = datastore_types.Link
Email = datastore_types.Email
GeoPt = datastore_types.GeoPt
IM = datastore_types.IM
PhoneNumber = datastore_types.PhoneNumber
PostalAddress = datastore_types.PostalAddress
Rating = datastore_types.Rating
Text = datastore_types.Text
Blob = datastore_types.Blob
ByteString = datastore_types.ByteString
BlobKey = datastore_types.BlobKey
# Datastore capability and read-consistency constants.
READ_CAPABILITY = datastore.READ_CAPABILITY
WRITE_CAPABILITY = datastore.WRITE_CAPABILITY
STRONG_CONSISTENCY = datastore.STRONG_CONSISTENCY
EVENTUAL_CONSISTENCY = datastore.EVENTUAL_CONSISTENCY
# Registry mapping kind name -> implementing Model class (see class_for_kind).
_kind_map = {}
# Unique sentinel object; its name indicates it marks self-references.
_SELF_REFERENCE = object()
# Attribute names users may not use as property names.
_RESERVED_WORDS = set(['key_name'])
# Module-specific exception hierarchy; each class derives (directly or via
# BadValueError) from datastore_errors.Error.
class NotSavedError(Error):
  """Raised when a saved-object action is performed on a non-saved object."""
class KindError(BadValueError):
  """Raised when an entity is used with incorrect Model."""
class PropertyError(Error):
  """Raised when non-existent property is referenced."""
class DuplicatePropertyError(Error):
  """Raised when a property is duplicated in a model definition."""
class ConfigurationError(Error):
  """Raised when a property or model is improperly configured."""
class ReservedWordError(Error):
  """Raised when a property is defined for a reserved word."""
class DerivedPropertyError(Error):
  """Raised when attempting to assign a value to a derived property."""
# Value types that may be assigned to declared (static) properties.
_ALLOWED_PROPERTY_TYPES = set([
    basestring,
    str,
    unicode,
    bool,
    int,
    long,
    float,
    Key,
    datetime.datetime,
    datetime.date,
    datetime.time,
    Blob,
    ByteString,
    Text,
    users.User,
    Category,
    Link,
    Email,
    GeoPt,
    IM,
    PhoneNumber,
    PostalAddress,
    Rating,
    BlobKey,
    ])
# Expando (dynamic) properties additionally accept lists, tuples and None.
_ALLOWED_EXPANDO_PROPERTY_TYPES = set(_ALLOWED_PROPERTY_TYPES)
_ALLOWED_EXPANDO_PROPERTY_TYPES.update((list, tuple, type(None)))
# Comparison operators recognized in query filter strings.
_OPERATORS = ['<', '<=', '>', '>=', '=', '==', '!=', 'in']
# Matches "prop" or "prop <op>"; group 1 is the property name, group 3 the
# operator (absent operator means equality).
_FILTER_REGEX = re.compile(
    '^\s*([^\s]+)(\s+(%s)\s*)?$' % '|'.join(_OPERATORS),
    re.IGNORECASE | re.UNICODE)
def class_for_kind(kind):
  """Return base-class responsible for implementing kind.

  Necessary to recover the class responsible for implementing provided
  kind.

  Args:
    kind: Entity kind string.

  Returns:
    Class implementation for kind.

  Raises:
    KindError when there is no implementation for kind.
  """
  try:
    implementation = _kind_map[kind]
  except KeyError:
    raise KindError("No implementation for kind '%s'" % kind)
  return implementation
def check_reserved_word(attr_name):
  """Raise an exception if attribute name is a reserved word.
  Args:
    attr_name: Name to check to see if it is a reserved word.
  Raises:
    ReservedWordError when attr_name is determined to be a reserved word.
  """
  # Names matching the datastore's reserved-name pattern (dunder names)
  # may never be used as property names.
  if datastore_types.RESERVED_PROPERTY_NAME.match(attr_name):
    raise ReservedWordError(
        "Cannot define property. All names both beginning and "
        "ending with '__' are reserved.")
  # Also reject names that would shadow Model attributes or the explicitly
  # reserved words (e.g. 'key_name').
  if attr_name in _RESERVED_WORDS or attr_name in dir(Model):
    raise ReservedWordError(
        "Cannot define property using reserved word '%(attr_name)s'. "
        "If you would like to use this name in the datastore consider "
        "using a different name like %(attr_name)s_ and adding "
        "name='%(attr_name)s' to the parameter list of the property "
        "definition." % locals())
def query_descendants(model_instance):
  """Returns a query for all the descendants of a model instance.
  Args:
    model_instance: Model instance to find the descendants of.
  Returns:
    Query that will retrieve all entities that have the given model instance
    as an ancestor. Unlike normal ancestor queries, this does not include the
    ancestor itself.
  """
  # IDIOM FIX: removed stray C-style semicolons from the statements below.
  # The key-inequality filter excludes the ancestor entity itself while
  # matching everything beneath it in key order.
  result = Query().ancestor(model_instance)
  result.filter(datastore_types._KEY_SPECIAL_PROPERTY + ' >',
                model_instance.key())
  return result
def model_to_protobuf(model_instance, _entity_class=datastore.Entity):
  """Encode a model instance as a protocol buffer.

  Args:
    model_instance: Model instance to encode.
    _entity_class: Intentionally undocumented.

  Returns:
    entity_pb.EntityProto representation of the model instance.
  """
  entity = model_instance._populate_entity(_entity_class)
  return entity.ToPb()
def model_from_protobuf(pb, _entity_class=datastore.Entity):
  """Decode a model instance from a protocol buffer.

  Args:
    pb: The protocol buffer representation of the model instance.  Can be
      an entity_pb.EntityProto or a str encoding of an entity_pb.EntityProto.
    _entity_class: Intentionally undocumented.

  Returns:
    Model instance of the kind registered for the decoded entity.
  """
  entity = _entity_class.FromPb(pb)
  model_class = class_for_kind(entity.kind())
  return model_class.from_entity(entity)
def _initialize_properties(model_class, name, bases, dct):
  """Initialize Property attributes for Model-class.

  Merges Property definitions from all base classes and the new class
  body into model_class._properties, rejecting genuine duplicates, and
  records which properties are unindexed.

  Args:
    model_class: Model class to initialize properties for.
    name: Name of the new class.
    bases: Base classes of the new class.
    dct: Class-body namespace dict of the new class.

  Raises:
    DuplicatePropertyError: If the same property name is defined in the
      new class and a base, or independently in two different bases.
  """
  model_class._properties = {}
  # Maps property name -> the class it was (most recently) resolved to.
  # Used to distinguish a true duplicate from the same definition seen
  # twice via diamond inheritance.
  property_source = {}

  def get_attr_source(name, cls):
    # Walk cls's MRO to find the class that actually defines `name`;
    # returns None implicitly if no class in the MRO defines it.
    for src_cls in cls.mro():
      if name in src_cls.__dict__:
        return src_cls

  defined = set()
  for base in bases:
    if hasattr(base, '_properties'):
      property_keys = set(base._properties.keys())
      duplicate_property_keys = defined & property_keys
      for dupe_prop_name in duplicate_property_keys:
        # Resolve both sightings to their defining classes; only a
        # genuinely different origin is an error.  Note property_source
        # is updated in place with the resolved class.
        old_source = property_source[dupe_prop_name] = get_attr_source(
            dupe_prop_name, property_source[dupe_prop_name])
        new_source = get_attr_source(dupe_prop_name, base)
        if old_source != new_source:
          raise DuplicatePropertyError(
              'Duplicate property, %s, is inherited from both %s and %s.' %
              (dupe_prop_name, old_source.__name__, new_source.__name__))
      # Only the not-yet-seen names count as newly defined by this base.
      property_keys -= duplicate_property_keys
      if property_keys:
        defined |= property_keys
        property_source.update(dict.fromkeys(property_keys, base))
        model_class._properties.update(base._properties)

  for attr_name in dct.keys():
    attr = dct[attr_name]
    if isinstance(attr, Property):
      check_reserved_word(attr_name)
      if attr_name in defined:
        raise DuplicatePropertyError('Duplicate property: %s' % attr_name)
      defined.add(attr_name)
      model_class._properties[attr_name] = attr
      # Tell the descriptor which class and attribute it now belongs to.
      attr.__property_config__(model_class, attr_name)

  model_class._unindexed_properties = frozenset(
    name for name, prop in model_class._properties.items() if not prop.indexed)
def _coerce_to_key(value):
  """Convert a Model, Key, or encoded-key string into a Key.

  Args:
    value: a Model or Key instance or string encoded key or None.

  Returns:
    The corresponding key, or None if value is None.

  Raises:
    datastore_errors.BadArgumentError: If more than one value is supplied.
  """
  if value is None:
    return None

  values, _ = datastore.NormalizeAndTypeCheck(
      value, (Model, Key, basestring))
  if len(values) > 1:
    raise datastore_errors.BadArgumentError('Expected only one model or key')

  candidate = values[0]
  if isinstance(candidate, Model):
    return candidate.key()
  if isinstance(candidate, basestring):
    return Key(candidate)
  return candidate
class PropertiedClass(type):
  """Meta-class that wires Property descriptors to their Model class.

  Creating a class with this meta-class makes much of a Property's
  configuration implicit: every descriptor learns which class it belongs
  to and which attribute it is bound to via __property_config__.

  Duplicate properties are not permitted.
  """

  def __init__(cls, name, bases, dct, map_kind=True):
    """Initialize a class that may carry Property definitions.

    Gathers the properties of this model and all of its bases into a
    dictionary (for reflection via 'properties'), configures each
    property defined on the new class, and registers the class in the
    kind map so entities of this kind can be decoded back to it.

    Properties duplicated in the new class or across two bases are
    rejected.  Property names may not be reserved words; a reserved name
    can still be used in the datastore via the Property constructor's
    'name' keyword argument.

    Args:
      cls: Class being initialized.
      name: Name of new class.
      bases: Base classes of new class.
      dct: Dictionary of new definitions for class.
      map_kind: Whether to register cls in the global kind map.

    Raises:
      DuplicatePropertyError: When a property is duplicated either in the
        new class or separately in two base classes.
      ReservedWordError: When a property name is a reserved word, an
        attribute of Model, or of the form '__.*__'.
    """
    super(PropertiedClass, cls).__init__(name, bases, dct)
    _initialize_properties(cls, name, bases, dct)
    if not map_kind:
      return
    _kind_map[cls.kind()] = cls
class Property(object):
  """A Property is an attribute of a Model.

  A Property describes how one attribute of a Model is validated and
  represented in the datastore.  Concrete property types support
  different options that tune validation rules, default values, etc.
  The simplest example is a StringProperty:

    class Story(db.Model):
      title = db.StringProperty()
  """

  # Class-level counter recording declaration order across all
  # Property instances.
  creation_counter = 0

  def __init__(self,
               verbose_name=None,
               name=None,
               default=None,
               required=False,
               validator=None,
               choices=None,
               indexed=True):
    """Initialize this Property with the given options.

    Args:
      verbose_name: User friendly name of property.
      name: Storage name for property.  Defaults to the attribute name
        this Property is assigned to in the Model sub-class.
      default: Default value for property if none is assigned.
      required: Whether property is required.
      validator: User provided method used for validation.
      choices: User provided set of valid property values.
      indexed: Whether property is indexed.
    """
    self.verbose_name = verbose_name
    self.name = name
    self.default = default
    self.required = required
    self.validator = validator
    self.choices = choices
    self.indexed = indexed
    # Stamp this instance with the next declaration-order value and
    # advance the shared counter.
    self.creation_counter = Property.creation_counter
    Property.creation_counter += 1

  def __property_config__(self, model_class, property_name):
    """Attach this property to its model class.

    Records the owning model class and, when no explicit storage name
    was given, adopts the attribute name as the storage name.

    Args:
      model_class: Model class which Property will belong to.
      property_name: Name of the attribute this Property is bound to
        on the Model class.
    """
    self.model_class = model_class
    if self.name is None:
      self.name = property_name

  def __get__(self, model_instance, model_class):
    """Descriptor read: return this property's value on the instance.

    Accessing the attribute on the class itself yields the Property
    object; an unset instance value reads as None.  See
    http://docs.python.org/ref/descriptors.html for the descriptor
    protocol.
    """
    if model_instance is None:
      return self
    # Missing backing attribute (never assigned) reads as None.
    return getattr(model_instance, self._attr_name(), None)

  def __set__(self, model_instance, value):
    """Descriptor write: validate, then store on the instance.

    See http://docs.python.org/ref/descriptors.html for the descriptor
    protocol.
    """
    validated = self.validate(value)
    setattr(model_instance, self._attr_name(), validated)

  def default_value(self):
    """Default value for unassigned values.

    Returns:
      Default value as provided by __init__(default).
    """
    return self.default

  def validate(self, value):
    """Assert that the provided value is compatible with this property.

    Args:
      value: Value to validate against this Property.

    Returns:
      A valid value, either the input unchanged or adapted to the
      required type.

    Raises:
      BadValueError if the value is not appropriate for this
      property in any way.
    """
    if self.empty(value):
      if self.required:
        raise BadValueError('Property %s is required' % self.name)
    else:
      if self.choices:
        # Membership is tested with == against each choice.
        if not any(choice == value for choice in self.choices):
          raise BadValueError('Property %s is %r; must be one of %r' %
                              (self.name, value, self.choices))
    if self.validator is not None:
      self.validator(value)
    return value

  def empty(self, value):
    """Report whether value counts as empty for this property.

    For most kinds this is plain falsiness, but for kinds like bool the
    test is subtler, so subclasses can override this method.

    Args:
      value: Value to validate against this Property.

    Returns:
      True if this value is considered empty in the context of this
      Property type, otherwise False.
    """
    return not value

  def get_value_for_datastore(self, model_instance):
    """Datastore representation of this property.

    Reads this property from the given model instance and returns the
    representation suitable for storing on a datastore entity.  Most
    critically, subclasses use this to fetch the key for reference
    properties.

    Args:
      model_instance: Instance to fetch datastore value from.

    Returns:
      Datastore representation of the model value in a form that is
      appropriate for storing in the datastore.
    """
    return self.__get__(model_instance, model_instance.__class__)

  def make_value_from_datastore(self, value):
    """Native representation of this property.

    Given a value retrieved from a datastore entity, return the value,
    possibly converted, to store on the model instance.  Usually this is
    the identity, but subclasses using a different in-memory datatype
    override it.

    This API is not quite symmetric with get_value_for_datastore()
    because the model instance receiving the converted value may not
    exist yet -- values may be collected for a model constructor.

    Args:
      value: value retrieved from the datastore entity.

    Returns:
      The value converted for use as a model instance attribute.
    """
    return value

  def _require_parameter(self, kwds, parameter, value):
    """Force kwds[parameter] to be value.

    Args:
      kwds: The parameter dict, which maps parameter names (strings) to
        values.
      parameter: The name of the parameter to set.
      value: The value to set it to.

    Raises:
      ConfigurationError: If kwds already binds parameter to a
        different value.
    """
    current = kwds.get(parameter, value)
    if current != value:
      raise ConfigurationError('%s must be %s.' % (parameter, value))
    kwds[parameter] = value

  def _attr_name(self):
    """Instance attribute name backing this property ('_' + name).

    DO NOT USE THIS METHOD.
    """
    return '_' + self.name

  # Python type stored by this property; subclasses override.
  data_type = str

  def datastore_type(self):
    """Deprecated backwards-compatible accessor method for self.data_type."""
    return self.data_type
class Model(object):
  """Model is the superclass of all object entities in the datastore.

  The programming model is to declare Python subclasses of the Model class,
  declaring datastore properties as class members of that class. So if you want
  to publish a story with title, body, and created date, you would do it like
  this:

    class Story(db.Model):
      title = db.StringProperty()
      body = db.TextProperty()
      created = db.DateTimeProperty(auto_now_add=True)

  A model instance can have a single parent.  Model instances without any
  parent are root entities.  It is possible to efficiently query for
  instances by their shared parent.  All descendants of a single root
  instance also behave as a transaction group.  This means that when you
  work one member of the group within a transaction all descendants of that
  root join the transaction.  All operations within a transaction on this
  group are ACID.
  """

  __metaclass__ = PropertiedClass

  def __init__(self,
               parent=None,
               key_name=None,
               _app=None,
               _from_entity=False,
               **kwds):
    """Creates a new instance of this model.

    To create a new entity, you instantiate a model and then call put(),
    which saves the entity to the datastore:

      person = Person()
      person.name = 'Bret'
      person.put()

    You can initialize properties in the model in the constructor with keyword
    arguments:

      person = Person(name='Bret')

    We initialize all other properties to the default value (as defined by the
    properties in the model definition) if they are not provided in the
    constructor.

    Args:
      parent: Parent instance for this instance or None, indicating a top-
        level instance.
      key_name: Name for new model instance.
      _from_entity: Intentionally undocumented.
      kwds: Keyword arguments mapping to properties of model.  Also:
        key: Key instance for this instance, if provided makes parent and
             key_name redundant (they do not need to be set but if they are
             they must match the key).
    """
    key = kwds.get('key', None)
    if key is not None:
      # A complete key was supplied: normalize it, then verify it is
      # internally consistent and agrees with any parent/key_name/_app
      # that were also passed.
      if isinstance(key, (tuple, list)):
        key = Key.from_path(*key)
      if isinstance(key, basestring):
        key = Key(encoded=key)
      if not isinstance(key, Key):
        raise TypeError('Expected Key type; received %s (is %s)' %
                        (key, key.__class__.__name__))
      if not key.has_id_or_name():
        raise BadKeyError('Key must have an id or name')
      if key.kind() != self.kind():
        raise BadKeyError('Expected Key kind to be %s; received %s' %
                          (self.kind(), key.kind()))
      if _app is not None and key.app() != _app:
        raise BadKeyError('Expected Key app to be %s; received %s' %
                          (_app, key.app()))
      if key_name and key_name != key.name():
        raise BadArgumentError('Cannot use key and key_name at the same time'
                               ' with different values')
      if parent and parent != key.parent():
        raise BadArgumentError('Cannot use key and parent at the same time'
                               ' with different values')
      # The key carries all identity information; the other identity
      # fields stay unset.
      self._key = key
      self._key_name = None
      self._parent = None
      self._parent_key = None
    else:
      # No key given: validate key_name and parent individually.
      if key_name == '':
        raise BadKeyError('Name cannot be empty.')
      elif key_name is not None and not isinstance(key_name, basestring):
        raise BadKeyError('Name must be string type, not %s' %
                          key_name.__class__.__name__)
      if parent is not None:
        if not isinstance(parent, (Model, Key)):
          raise TypeError('Expected Model type; received %s (is %s)' %
                          (parent, parent.__class__.__name__))
        if isinstance(parent, Model) and not parent.has_key():
          raise BadValueError(
              "%s instance must have a complete key before it can be used as a "
              "parent." % parent.kind())
        if isinstance(parent, Key):
          # Keep only the key; the parent instance is fetched lazily.
          self._parent_key = parent
          self._parent = None
        else:
          self._parent_key = parent.key()
          self._parent = parent
      else:
        self._parent_key = None
        self._parent = None
      self._key_name = key_name
      self._key = None
    # _entity is only set once the instance is saved/loaded; see
    # is_saved().
    self._entity = None
    if _app is not None and isinstance(_app, Key):
      raise BadArgumentError('_app should be a string; received Key(\'%s\'):\n'
                             ' This may be the result of passing \'key\' as '
                             'a positional parameter in SDK 1.2.6. Please '
                             'only pass \'key\' as a keyword parameter.' % _app)
    self._app = _app

    # Populate every declared property, from kwds when supplied and from
    # its default otherwise.
    for prop in self.properties().values():
      if prop.name in kwds:
        value = kwds[prop.name]
      else:
        value = prop.default_value()
      try:
        prop.__set__(self, value)
      except DerivedPropertyError, e:
        # Derived (computed) properties reject assignment; only complain
        # when the caller explicitly supplied a value outside of entity
        # deserialization.
        if prop.name in kwds and not _from_entity:
          raise

  def key(self):
    """Unique key for this entity.

    This property is only available if this entity is already stored in the
    datastore or if it has a full key, so it is available if this entity was
    fetched returned from a query, or after put() is called the first time
    for new entities, or if a complete key was given when constructed.

    Returns:
      Datastore key of persisted entity.

    Raises:
      NotSavedError when entity is not persistent.
    """
    if self.is_saved():
      return self._entity.key()
    elif self._key:
      return self._key
    elif self._key_name:
      # Lazily build (and cache) a full key from kind/key_name/parent.
      parent = self._parent_key or (self._parent and self._parent.key())
      self._key = Key.from_path(self.kind(), self._key_name, parent=parent)
      return self._key
    else:
      raise NotSavedError()

  def _to_entity(self, entity):
    """Copies information from this model to provided entity.

    Args:
      entity: Entity to save information on.
    """
    for prop in self.properties().values():
      datastore_value = prop.get_value_for_datastore(self)
      if datastore_value == []:
        # An empty list value is represented by the property's absence
        # on the entity, so remove it if present.
        try:
          del entity[prop.name]
        except KeyError:
          pass
      else:
        entity[prop.name] = datastore_value
    entity.set_unindexed_properties(self._unindexed_properties)

  def _populate_internal_entity(self, _entity_class=datastore.Entity):
    """Populates self._entity, saving its state to the datastore.

    After this method is called, calling is_saved() will return True.

    Returns:
      Populated self._entity
    """
    self._entity = self._populate_entity(_entity_class=_entity_class)
    # Drop the cached identity attributes: from now on key() and
    # parent_key() delegate to the entity itself.
    for attr in ('_key_name', '_key'):
      try:
        delattr(self, attr)
      except AttributeError:
        pass
    return self._entity

  def put(self, **kwargs):
    """Writes this model instance to the datastore.

    If this instance is new, we add an entity to the datastore.
    Otherwise, we update this instance, and the key will remain the
    same.

    Returns:
      The key of the instance (either the existing key or a new key).

    Raises:
      TransactionFailedError if the data could not be committed.
    """
    rpc = datastore.GetRpcFromKwargs(kwargs)
    self._populate_internal_entity()
    return datastore.Put(self._entity, rpc=rpc)

  save = put

  def _populate_entity(self, _entity_class=datastore.Entity):
    """Internal helper -- Populate self._entity or create a new one
    if that one does not exist.  Does not change any state of the instance
    other than the internal state of the entity.

    This method is separate from _populate_internal_entity so that it is
    possible to call to_xml without changing the state of an unsaved entity
    to saved.

    Returns:
      self._entity or a new Entity which is not stored on the instance.
    """
    if self.is_saved():
      entity = self._entity
    else:
      # Assemble the Entity constructor arguments from whichever
      # identity fields this unsaved instance carries.
      kwds = {'_app': self._app,
              'unindexed_properties': self._unindexed_properties}
      if self._key is not None:
        if self._key.id():
          kwds['id'] = self._key.id()
        else:
          kwds['name'] = self._key.name()
        if self._key.parent():
          kwds['parent'] = self._key.parent()
      else:
        if self._key_name is not None:
          kwds['name'] = self._key_name
        if self._parent_key is not None:
          kwds['parent'] = self._parent_key
        elif self._parent is not None:
          kwds['parent'] = self._parent._entity
      entity = _entity_class(self.kind(), **kwds)
    self._to_entity(entity)
    return entity

  def delete(self, **kwargs):
    """Deletes this entity from the datastore.

    Raises:
      TransactionFailedError if the data could not be committed.
    """
    rpc = datastore.GetRpcFromKwargs(kwargs)
    datastore.Delete(self.key(), rpc=rpc)
    # Preserve the key so key() keeps working, but clear the saved
    # entity so is_saved() becomes False.
    self._key = self.key()
    self._key_name = None
    self._parent_key = None
    self._entity = None

  def is_saved(self):
    """Determine if entity is persisted in the datastore.

    New instances of Model do not start out saved in the data.  Objects which
    are saved to or loaded from the Datastore will have a True saved state.

    Returns:
      True if object has been persisted to the datastore, otherwise False.
    """
    return self._entity is not None

  def has_key(self):
    """Determine if this model instance has a complete key.

    When not using a fully self-assigned Key, ids are not assigned until the
    data is saved to the Datastore, but instances with a key name always have
    a full key.

    Returns:
      True if the object has been persisted to the datastore or has a key
      or has a key_name, otherwise False.
    """
    return self.is_saved() or self._key or self._key_name

  def dynamic_properties(self):
    """Returns a list of all dynamic properties defined for instance."""
    # Plain Model instances never have dynamic properties; Expando
    # overrides this.
    return []

  def instance_properties(self):
    """Alias for dynamic_properties."""
    return self.dynamic_properties()

  def parent(self):
    """Get the parent of the model instance.

    Returns:
      Parent of contained entity or parent provided in constructor, None if
      instance has no parent.
    """
    if self._parent is None:
      # Lazily fetch (and cache) the parent instance from its key.
      parent_key = self.parent_key()
      if parent_key is not None:
        self._parent = get(parent_key)
    return self._parent

  def parent_key(self):
    """Get the parent's key.

    This method is useful for avoiding a potential fetch from the datastore
    but still get information about the instances parent.

    Returns:
      Parent key of entity, None if there is no parent.
    """
    if self._parent_key is not None:
      return self._parent_key
    elif self._parent is not None:
      return self._parent.key()
    elif self._entity is not None:
      return self._entity.parent()
    elif self._key is not None:
      return self._key.parent()
    else:
      return None

  def to_xml(self, _entity_class=datastore.Entity):
    """Generate an XML representation of this model instance.

    atom and gd:namespace properties are converted to XML according to their
    respective schemas. For more information, see:

      http://www.atomenabled.org/developers/syndication/
      http://code.google.com/apis/gdata/common-elements.html
    """
    entity = self._populate_entity(_entity_class)
    return entity.ToXml()

  @classmethod
  def get(cls, keys, **kwargs):
    """Fetch instance from the datastore of a specific Model type using key.

    We support Key objects and string keys (we convert them to Key objects
    automatically).

    Useful for ensuring that specific instance types are retrieved from the
    datastore.  It also helps that the source code clearly indicates what
    kind of object is being retrieved.  Example:

      story = Story.get(story_key)

    Args:
      keys: Key within datastore entity collection to find; or string key;
        or list of Keys or string keys.

    Returns:
      If a single key was given: a Model instance associated with key
      for provided class if it exists in the datastore, otherwise
      None; if a list of keys was given: a list whose items are either
      a Model instance or None.

    Raises:
      KindError if any of the retrieved objects are not instances of the
      type associated with call to 'get'.
    """
    rpc = datastore.GetRpcFromKwargs(kwargs)
    results = get(keys, rpc=rpc)
    if results is None:
      return None
    if isinstance(results, Model):
      instances = [results]
    else:
      instances = results
    # Enforce that everything fetched is of this class (or a subclass).
    for instance in instances:
      if not(instance is None or isinstance(instance, cls)):
        raise KindError('Kind %r is not a subclass of kind %r' %
                        (instance.kind(), cls.kind()))
    return results

  @classmethod
  def get_by_key_name(cls, key_names, parent=None, **kwargs):
    """Get instance of Model class by its key's name.

    Args:
      key_names: A single key-name or a list of key-names.
      parent: Parent of instances to get.  Can be a model or key.
    """
    try:
      parent = _coerce_to_key(parent)
    except BadKeyError, e:
      # A bad parent is an argument error from the caller's perspective.
      raise BadArgumentError(str(e))
    rpc = datastore.GetRpcFromKwargs(kwargs)
    key_names, multiple = datastore.NormalizeAndTypeCheck(key_names, basestring)
    keys = [datastore.Key.from_path(cls.kind(), name, parent=parent)
            for name in key_names]
    if multiple:
      return get(keys, rpc=rpc)
    else:
      return get(keys[0], rpc=rpc)

  @classmethod
  def get_by_id(cls, ids, parent=None, **kwargs):
    """Get instance of Model class by id.

    Args:
      ids: A single id or a list of ids.
      parent: Parent of instances to get.  Can be a model or key.
    """
    rpc = datastore.GetRpcFromKwargs(kwargs)
    if isinstance(parent, Model):
      parent = parent.key()
    ids, multiple = datastore.NormalizeAndTypeCheck(ids, (int, long))
    keys = [datastore.Key.from_path(cls.kind(), id, parent=parent)
            for id in ids]
    if multiple:
      return get(keys, rpc=rpc)
    else:
      return get(keys[0], rpc=rpc)

  @classmethod
  def get_or_insert(cls, key_name, **kwds):
    """Transactionally retrieve or create an instance of Model class.

    This acts much like the Python dictionary setdefault() method, where we
    first try to retrieve a Model instance with the given key name and parent.
    If it's not present, then we create a new instance (using the *kwds
    supplied) and insert that with the supplied key name.

    Subsequent calls to this method with the same key_name and parent will
    always yield the same entity (though not the same actual object instance),
    regardless of the *kwds supplied.  If the specified entity has somehow
    been deleted separately, then the next call will create a new entity and
    return it.

    If the 'parent' keyword argument is supplied, it must be a Model instance.
    It will be used as the parent of the new instance of this Model class if
    one is created.

    This method is especially useful for having just one unique entity for
    a specific identifier.  Insertion/retrieval is done transactionally, which
    guarantees uniqueness.

    Example usage:

      class WikiTopic(db.Model):
        creation_date = db.DatetimeProperty(auto_now_add=True)
        body = db.TextProperty(required=True)

      # The first time through we'll create the new topic.
      wiki_word = 'CommonIdioms'
      topic = WikiTopic.get_or_insert(wiki_word,
                                      body='This topic is totally new!')
      assert topic.key().name() == 'CommonIdioms'
      assert topic.body == 'This topic is totally new!'

      # The second time through will just retrieve the entity.
      overwrite_topic = WikiTopic.get_or_insert(wiki_word,
                                      body='A totally different message!')
      assert topic.key().name() == 'CommonIdioms'
      assert topic.body == 'This topic is totally new!'

    Args:
      key_name: Key name to retrieve or create.
      **kwds: Keyword arguments to pass to the constructor of the model class
        if an instance for the specified key name does not already exist.  If
        an instance with the supplied key_name and parent already exists, the
        rest of these arguments will be discarded.

    Returns:
      Existing instance of Model class with the specified key_name and parent
      or a new one that has just been created.

    Raises:
      TransactionFailedError if the specified Model instance could not be
      retrieved or created transactionally (due to high contention, etc).
    """
    def txn():
      # Lookup and conditional insert run inside one transaction, which
      # is what guarantees uniqueness for the key_name.
      entity = cls.get_by_key_name(key_name, parent=kwds.get('parent'))
      if entity is None:
        entity = cls(key_name=key_name, **kwds)
        entity.put()
      return entity
    return run_in_transaction(txn)

  @classmethod
  def all(cls, **kwds):
    """Returns a query over all instances of this model from the datastore.

    Returns:
      Query that will retrieve all instances from entity collection.
    """
    return Query(cls, **kwds)

  @classmethod
  def gql(cls, query_string, *args, **kwds):
    """Returns a query using GQL query string.

    See appengine/ext/gql for more information about GQL.

    Args:
      query_string: properly formatted GQL query string with the
        'SELECT * FROM <entity>' part omitted
      *args: rest of the positional arguments used to bind numeric references
        in the query.
      **kwds: dictionary-based arguments (for named parameters).
    """
    return GqlQuery('SELECT * FROM %s %s' % (cls.kind(), query_string),
                    *args, **kwds)

  @classmethod
  def _load_entity_values(cls, entity):
    """Load declared property values from an entity.

    Args:
      entity: Entity which contains values to load declared properties from.

    Returns:
      Dict mapping property name to its converted (native) value.
    """
    entity_values = {}
    for prop in cls.properties().values():
      if prop.name in entity:
        try:
          value = prop.make_value_from_datastore(entity[prop.name])
          entity_values[prop.name] = value
        except KeyError:
          # The property vanished between the membership test and the
          # read; treat it as an empty list value.
          entity_values[prop.name] = []
    return entity_values

  @classmethod
  def from_entity(cls, entity):
    """Converts the entity representation of this model to an instance.

    Converts datastore.Entity instance to an instance of cls.

    Args:
      entity: Entity loaded directly from datastore.

    Raises:
      KindError when cls is incorrect model for entity.
    """
    if cls.kind() != entity.kind():
      raise KindError('Class %s cannot handle kind \'%s\'' %
                      (repr(cls), entity.kind()))
    entity_values = cls._load_entity_values(entity)
    if entity.key().has_id_or_name():
      entity_values['key'] = entity.key()
    instance = cls(None, _from_entity=True, **entity_values)
    if entity.is_saved():
      # Adopt the entity as the saved state; key()/parent_key() now
      # delegate to it, so drop the cached identity attributes.
      instance._entity = entity
      del instance._key_name
      del instance._key
    return instance

  @classmethod
  def kind(cls):
    """Returns the datastore kind we use for this model.

    We just use the name of the model for now, ignoring potential collisions.
    """
    return cls.__name__

  @classmethod
  def entity_type(cls):
    """Soon to be removed alias for kind."""
    return cls.kind()

  @classmethod
  def properties(cls):
    """Returns a dictionary of all the properties defined for this model."""
    # Copy so callers cannot mutate the class-level registry.
    return dict(cls._properties)

  @classmethod
  def fields(cls):
    """Soon to be removed alias for properties."""
    return cls.properties()
def create_rpc(deadline=None, callback=None, read_policy=STRONG_CONSISTENCY):
  """Create an rpc object for use in configuring datastore calls.

  Args:
    deadline: float, deadline for calls in seconds.
    callback: callable, a callback triggered when this rpc completes,
      accepts one argument: the returned rpc.
    read_policy: flag, set to EVENTUAL_CONSISTENCY to enable eventually
      consistent reads.

  Returns:
    A datastore.DatastoreRPC instance.
  """
  rpc_options = dict(
      deadline=deadline, callback=callback, read_policy=read_policy)
  return datastore.CreateRPC(**rpc_options)
def get(keys, **kwargs):
  """Fetch the specific Model instance(s) with the given key(s).

  We support Key objects and string keys (we convert them to Key objects
  automatically).

  Args:
    keys: Key within datastore entity collection to find; or string key;
      or list of Keys or string keys.

  Returns:
    If a single key was given: the matching Model instance, or None if it
    does not exist in the datastore; if a list of keys was given: a list
    whose items are either a Model instance or None.
  """
  rpc = datastore.GetRpcFromKwargs(kwargs)
  keys, multiple = datastore.NormalizeAndTypeCheckKeys(keys)
  try:
    entities = datastore.Get(keys, rpc=rpc)
  except datastore_errors.EntityNotFoundError:
    # A batch Get reports misses as Nones in the result list; the
    # exception can only come from a single-key lookup.
    assert not multiple
    return None

  # Decode each entity with the class registered for its kind;
  # misses stay None.
  models = [None if entity is None
            else class_for_kind(entity.kind()).from_entity(entity)
            for entity in entities]
  if not multiple:
    assert len(models) == 1
    return models[0]
  return models
def put(models, **kwargs):
  """Store one or more Model instances.

  Args:
    models: Model instance or list of Model instances.

  Returns:
    A Key or a list of Keys (corresponding to the argument's plurality).

  Raises:
    TransactionFailedError if the data could not be committed.
  """
  rpc = datastore.GetRpcFromKwargs(kwargs)
  models, multiple = datastore.NormalizeAndTypeCheck(models, Model)
  entities = [model._populate_internal_entity() for model in models]
  keys = datastore.Put(entities, rpc=rpc)
  if not multiple:
    assert len(keys) == 1
    return keys[0]
  return keys

save = put
def delete(models, **kwargs):
  """Delete one or more Model instances (or keys).

  Args:
    models: A Model instance, key, or a list of either.

  Raises:
    TransactionFailedError if the data could not be committed.
  """
  rpc = datastore.GetRpcFromKwargs(kwargs)
  if isinstance(models, (list, tuple)):
    targets = models
  else:
    targets = [models]
  keys = [_coerce_to_key(target) for target in targets]
  datastore.Delete(keys, rpc=rpc)
def allocate_ids(model, size, **kwargs):
  """Allocates a range of IDs of size for the model_key defined by model.

  Allocates a range of IDs in the datastore such that those IDs will not
  be automatically assigned to new entities.  You can only allocate IDs
  for model keys from your app.  If there is an error, raises a subclass of
  datastore_errors.Error.

  Args:
    model: Model instance, Key or string to serve as a template specifying
      the ID sequence in which to allocate IDs.  Returned ids should only
      be used in entities with the same parent (if any) and kind as this
      key.

  Returns:
    (start, end) of the allocated range, inclusive.
  """
  template_key = _coerce_to_key(model)
  return datastore.AllocateIds(template_key, size, **kwargs)
class Expando(Model):
"""Dynamically expandable model.
An Expando does not require (but can still benefit from) the definition
of any properties before it can be used to store information in the
datastore. Properties can be added to an expando object by simply
performing an assignment. The assignment of properties is done on
an instance by instance basis, so it is possible for one object of an
expando type to have different properties from another or even the same
properties with different types. It is still possible to define
properties on an expando, allowing those properties to behave the same
as on any other model.
Example:
import datetime
class Song(db.Expando):
title = db.StringProperty()
crazy = Song(title='Crazy like a diamond',
author='Lucy Sky',
publish_date='yesterday',
rating=5.0)
hoboken = Song(title='The man from Hoboken',
author=['Anthony', 'Lou'],
publish_date=datetime.datetime(1977, 5, 3))
crazy.last_minute_note=db.Text('Get a train to the station.')
Possible Uses:
One use of an expando is to create an object without any specific
structure and later, when your application mature and it in the right
state, change it to a normal model object and define explicit properties.
Additional exceptions for expando:
Protected attributes (ones whose names begin with '_') cannot be used
as dynamic properties. These are names that are reserved for protected
transient (non-persisted) attributes.
Order of lookup:
When trying to set or access an attribute value, any other defined
properties, such as methods and other values in __dict__ take precedence
over values in the datastore.
1 - Because it is not possible for the datastore to know what kind of
property to store on an undefined expando value, setting a property to
None is the same as deleting it from the expando.
2 - Persistent variables on Expando must not begin with '_'. These
variables considered to be 'protected' in Python, and are used
internally.
3 - Expando's dynamic properties are not able to store empty lists.
Attempting to assign an empty list to a dynamic property will raise
ValueError. Static properties on Expando can still support empty
lists but like normal Model properties is restricted from using
None.
"""
_dynamic_properties = None
def __init__(self, parent=None, key_name=None, _app=None, **kwds):
"""Creates a new instance of this expando model.
Args:
parent: Parent instance for this instance or None, indicating a top-
level instance.
key_name: Name for new model instance.
_app: Intentionally undocumented.
args: Keyword arguments mapping to properties of model.
"""
super(Expando, self).__init__(parent, key_name, _app, **kwds)
self._dynamic_properties = {}
for prop, value in kwds.iteritems():
if prop not in self.properties() and prop != 'key':
setattr(self, prop, value)
  def __setattr__(self, key, value):
    """Dynamically set field values that are not defined.

    Tries to set the value on the object normally, but failing that
    sets the value on the contained entity.

    Args:
      key: Name of attribute.
      value: Value to set for attribute. Must be compatible with
        datastore.

    Raises:
      ValueError on attempt to assign empty list.
    """
    check_reserved_word(key)
    # Dynamic handling applies only to non-protected names that are not
    # backed by a data descriptor (i.e. not a declared Property, @property,
    # or similar -- those keep their normal descriptor behavior).
    if (key[:1] != '_' and
        not hasattr(getattr(type(self), key, None), '__set__')):
      if value == []:
        raise ValueError('Cannot store empty list to dynamic property %s' %
                         key)
      if type(value) not in _ALLOWED_EXPANDO_PROPERTY_TYPES:
        raise TypeError("Expando cannot accept values of type '%s'." %
                        type(value).__name__)
      # May run before __init__ (e.g. while loading from an entity), so the
      # dynamic-property map is created lazily here.
      if self._dynamic_properties is None:
        self._dynamic_properties = {}
      self._dynamic_properties[key] = value
    else:
      super(Expando, self).__setattr__(key, value)
  def __getattribute__(self, key):
    """Get attribute from expando.

    Must be overridden to allow dynamic properties to obscure class attributes.

    Since all attributes are stored in self._dynamic_properties, the normal
    __getattribute__ does not attempt to access it until __setattr__ is called.
    By then, the static attribute being overwritten has already been located
    and returned from the call.

    This method short circuits the usual __getattribute__ call when finding a
    dynamic property and returns it to the user via __getattr__. __getattr__
    is called to preserve backward compatibility with older Expando models
    that may have overridden the original __getattr__.

    NOTE: Access to properties defined by Python descriptors are not obscured
    because setting those attributes are done through the descriptor and does
    not place those attributes in self._dynamic_properties.
    """
    # Protected names ('_...') bypass the dynamic lookup entirely; this also
    # prevents infinite recursion when reading self._dynamic_properties below.
    if not key.startswith('_'):
      dynamic_properties = self._dynamic_properties
      if dynamic_properties is not None and key in dynamic_properties:
        return self.__getattr__(key)
    return super(Expando, self).__getattribute__(key)
  def __getattr__(self, key):
    """If no explicit attribute defined, retrieve value from entity.

    Tries to get the value on the object normally, but failing that
    retrieves value from contained entity.

    Args:
      key: Name of attribute.

    Raises:
      AttributeError when there is no attribute for key on object or
      contained entity.
    """
    _dynamic_properties = self._dynamic_properties
    if _dynamic_properties is not None and key in _dynamic_properties:
      return _dynamic_properties[key]
    else:
      # Delegate to the base class via the super proxy; getattr raises
      # AttributeError if the name is unknown there as well.
      return getattr(super(Expando, self), key)
def __delattr__(self, key):
"""Remove attribute from expando.
Expando is not like normal entities in that undefined fields
can be removed.
Args:
key: Dynamic property to be deleted.
"""
if self._dynamic_properties and key in self._dynamic_properties:
del self._dynamic_properties[key]
else:
object.__delattr__(self, key)
def dynamic_properties(self):
"""Determine which properties are particular to instance of entity.
Returns:
Set of names which correspond only to the dynamic properties.
"""
if self._dynamic_properties is None:
return []
return self._dynamic_properties.keys()
  def _to_entity(self, entity):
    """Store to entity, deleting dynamic properties that no longer exist.

    When the expando is saved, it is possible that a given property no longer
    exists. In this case, the property will be removed from the saved instance.

    Args:
      entity: Entity which will receive dynamic properties.
    """
    super(Expando, self)._to_entity(entity)

    if self._dynamic_properties is None:
      self._dynamic_properties = {}

    for key, value in self._dynamic_properties.iteritems():
      entity[key] = value

    # Drop any entity value that corresponds to neither a declared nor a
    # dynamic property -- it was deleted from this instance since loading.
    all_properties = set(self._dynamic_properties.iterkeys())
    all_properties.update(self.properties().iterkeys())
    for key in entity.keys():
      if key not in all_properties:
        del entity[key]
  @classmethod
  def _load_entity_values(cls, entity):
    """Load dynamic properties from entity.

    Expando needs to do a second pass to add the entity values which were
    ignored by Model because they didn't have a corresponding predefined
    property on the model.

    Args:
      entity: Entity which contain values to search dynamic properties for.
    """
    entity_values = super(Expando, cls)._load_entity_values(entity)
    for key, value in entity.iteritems():
      if key not in entity_values:
        # NOTE(review): keys are coerced via str(); presumably entity keys
        # may be unicode and must become usable attribute names -- confirm.
        entity_values[str(key)] = value
    return entity_values
class _BaseQuery(object):
  """Base class for both Query and GqlQuery.

  Provides the execution interface (run/fetch/get/count), slicing, and
  cursor handling; subclasses supply _get_query() to build the low-level
  datastore.Query.
  """

  # Class-level default so _compile reads safely even on instances that
  # have not finished __init__; instances overwrite it in __init__.
  _compile = False

  def __init__(self, model_class=None, keys_only=False, compile=True,
               cursor=None):
    """Constructor.

    Args:
      model_class: Model class from which entities are constructed.
      keys_only: Whether the query should return full entities or only keys.
      compile: Whether the query should also return a compiled query.
      cursor: A compiled query from which to resume.
    """
    self._model_class = model_class
    self._keys_only = keys_only
    self._compile = compile
    # with_cursor() validates/decodes the cursor and sets self._cursor.
    self.with_cursor(cursor)

  def is_keys_only(self):
    """Returns whether this query is keys only.

    Returns:
      True if this query returns keys, False if it returns entities.
    """
    return self._keys_only

  def _get_query(self):
    """Subclass must override (and not call their super method).

    Returns:
      A datastore.Query instance representing the query.
    """
    raise NotImplementedError

  def run(self, **kwargs):
    """Iterator for this query.

    If you know the number of results you need, consider fetch() instead,
    or use a GQL query with a LIMIT clause. It's more efficient.

    Args:
      rpc: datastore.DatastoreRPC to use for this request.

    Returns:
      Iterator for this query.
    """
    rpc = datastore.GetRpcFromKwargs(kwargs)
    raw_query = self._get_query()
    iterator = raw_query.Run(rpc=rpc)
    if self._compile:
      # Remember the executed query so cursor() can later extract the
      # compiled cursor from it.
      self._last_raw_query = raw_query
    if self._keys_only:
      return iterator
    else:
      # Wrap the raw entity iterator so callers see Model instances.
      return _QueryIterator(self._model_class, iter(iterator))

  def __iter__(self):
    """Iterator for this query.

    If you know the number of results you need, consider fetch() instead,
    or use a GQL query with a LIMIT clause. It's more efficient.
    """
    return self.run()

  def get(self, **kwargs):
    """Get first result from this.

    Beware: get() ignores the LIMIT clause on GQL queries.

    Returns:
      First result from running the query if there are any, else None.
    """
    rpc = datastore.GetRpcFromKwargs(kwargs)
    results = self.fetch(1, rpc=rpc)
    try:
      return results[0]
    except IndexError:
      return None

  def count(self, limit=None, **kwargs):
    """Number of entities this query fetches.

    Beware: count() ignores the LIMIT clause on GQL queries.

    Args:
      limit, a number. If there are more results than this, stop short and
      just return this number. Providing this argument makes the count
      operation more efficient.

    Returns:
      Number of entities this query fetches.
    """
    rpc = datastore.GetRpcFromKwargs(kwargs)
    raw_query = self._get_query()
    result = raw_query.Count(limit=limit, rpc=rpc)
    # Counting yields no resumable position, so invalidate any cursor state
    # left over from a previous run()/fetch().
    self._last_raw_query = None
    return result

  def fetch(self, limit, offset=0, **kwargs):
    """Return a list of items selected using SQL-like limit and offset.

    Whenever possible, use fetch() instead of iterating over the query
    results with run() or __iter__() . fetch() is more efficient.

    Beware: fetch() ignores the LIMIT clause on GQL queries.

    Args:
      limit: Maximum number of results to return.
      offset: Optional number of results to skip first; default zero.
      rpc: datastore.DatastoreRPC to use for this request.

    Returns:
      A list of db.Model instances. There may be fewer than 'limit'
      results if there aren't enough results to satisfy the request.
    """
    rpc = datastore.GetRpcFromKwargs(kwargs)
    accepted = (int, long)
    if not (isinstance(limit, accepted) and isinstance(offset, accepted)):
      raise TypeError('Arguments to fetch() must be integers')
    if limit < 0 or offset < 0:
      raise ValueError('Arguments to fetch() must be >= 0')
    if limit == 0:
      return []
    raw_query = self._get_query()
    raw = raw_query.Get(limit, offset, rpc=rpc)
    if self._compile:
      self._last_raw_query = raw_query
    if self._keys_only:
      return raw
    else:
      if self._model_class is not None:
        return [self._model_class.from_entity(e) for e in raw]
      else:
        # Kindless query: resolve each entity's model class from its kind.
        return [class_for_kind(e.kind()).from_entity(e) for e in raw]

  def cursor(self):
    """Get a serialized cursor for an already executed query.

    The returned cursor effectively lets a future invocation of a similar
    query to begin fetching results immediately after the last returned
    result from this query invocation.

    Returns:
      A base64-encoded serialized cursor.
    """
    if not self._compile:
      raise AssertionError(
          'Query must be created with compile=True to produce cursors')
    try:
      # _last_raw_query only exists after a compiled run()/fetch().
      return base64.urlsafe_b64encode(
          self._last_raw_query.GetCompiledCursor().Encode())
    except AttributeError:
      raise AssertionError('No cursor available.')

  def with_cursor(self, cursor):
    """Set the start of this query to the given serialized cursor.

    When executed, this query will start from the next result for a previous
    invocation of a similar query.

    Returns:
      This Query instance, for chaining.
    """
    if not cursor:
      cursor = None
    elif not isinstance(cursor, basestring):
      raise BadValueError(
          'Cursor must be a str or unicode instance, not a %s'
          % type(cursor).__name__)
    else:
      cursor = str(cursor)
      try:
        # Cursors are base64-encoded serialized CompiledCursor protos.
        decoded = base64.urlsafe_b64decode(cursor)
        cursor = datastore_pb.CompiledCursor(decoded)
      except (ValueError, TypeError), e:
        raise datastore_errors.BadValueError(
            'Invalid cursor %s. Details: %s' % (cursor, e))
      except Exception, e:
        # ProtocolBufferDecodeError is matched by name to avoid importing
        # the protocol buffer module here.
        if e.__class__.__name__ == 'ProtocolBufferDecodeError':
          raise datastore_errors.BadValueError('Invalid cursor %s. '
                                               'Details: %s' % (cursor, e))
        else:
          raise
    self._cursor = cursor
    return self

  def __getitem__(self, arg):
    """Support for query[index] and query[start:stop].

    Beware: this ignores the LIMIT clause on GQL queries.

    Args:
      arg: Either a single integer, corresponding to the query[index]
        syntax, or a Python slice object, corresponding to the
        query[start:stop] or query[start:stop:step] syntax.

    Returns:
      A single Model instance when the argument is a single integer.
      A list of Model instances when the argument is a slice.
    """
    if isinstance(arg, slice):
      start, stop, step = arg.start, arg.stop, arg.step
      if start is None:
        start = 0
      if stop is None:
        raise ValueError('Open-ended slices are not supported')
      if step is None:
        step = 1
      if start < 0 or stop < 0 or step != 1:
        raise ValueError(
            'Only slices with start>=0, stop>=0, step==1 are supported')
      limit = stop - start
      if limit < 0:
        return []
      return self.fetch(limit, start)
    elif isinstance(arg, (int, long)):
      if arg < 0:
        raise ValueError('Only indices >= 0 are supported')
      results = self.fetch(1, arg)
      if results:
        return results[0]
      else:
        raise IndexError('The query returned fewer than %d results' % (arg+1))
    else:
      raise TypeError('Only integer indices and slices are supported')
class _QueryIterator(object):
"""Wraps the datastore iterator to return Model instances.
The datastore returns entities. We wrap the datastore iterator to
return Model instances instead.
"""
def __init__(self, model_class, datastore_iterator):
"""Iterator constructor
Args:
model_class: Model class from which entities are constructed.
datastore_iterator: Underlying datastore iterator.
"""
self.__model_class = model_class
self.__iterator = datastore_iterator
def __iter__(self):
"""Iterator on self.
Returns:
Self.
"""
return self
def next(self):
"""Get next Model instance in query results.
Returns:
Next model instance.
Raises:
StopIteration when there are no more results in query.
"""
if self.__model_class is not None:
return self.__model_class.from_entity(self.__iterator.next())
else:
entity = self.__iterator.next()
return class_for_kind(entity.kind()).from_entity(entity)
def _normalize_query_parameter(value):
  """Make any necessary type conversions to a query parameter.

  The following conversions are made:
    - Model instances are converted to Key instances. This is necessary so
      that querying reference properties will work.
    - datetime.date objects are converted to datetime.datetime objects (see
      _date_to_datetime for details on this conversion). This is necessary so
      that querying date properties with date objects will work.
    - datetime.time objects are converted to datetime.datetime objects (see
      _time_to_datetime for details on this conversion). This is necessary so
      that querying time properties with time objects will work.

  Args:
    value: The query parameter value.

  Returns:
    The input value, or a converted value if value matches one of the
    conversions specified above.
  """
  if isinstance(value, Model):
    value = value.key()
  # datetime.datetime subclasses datetime.date, so exclude it explicitly
  # when detecting plain dates.
  is_plain_date = (isinstance(value, datetime.date) and
                   not isinstance(value, datetime.datetime))
  if is_plain_date:
    value = _date_to_datetime(value)
  elif isinstance(value, datetime.time):
    value = _time_to_datetime(value)
  return value
class Query(_BaseQuery):
  """A Query instance queries over instances of Models.

  You construct a query with a model class, like this:

     class Story(db.Model):
       title = db.StringProperty()
       date = db.DateTimeProperty()

     query = Query(Story)

  You modify a query with filters and orders like this:

     query.filter('title =', 'Foo')
     query.order('-date')
     query.ancestor(key_or_model_instance)

  Every query can return an iterator, so you access the results of a query
  by iterating over it:

     for story in query:
       print story.title

  For convenience, all of the filtering and ordering methods return "self",
  so the easiest way to use the query interface is to cascade all filters and
  orders in the iterator line like this:

     for story in Query(story).filter('title =', 'Foo').order('-date'):
       print story.title
  """

  def __init__(self, model_class=None, keys_only=False, cursor=None):
    """Constructs a query over instances of the given Model.

    Args:
      model_class: Model class to build query for.
      keys_only: Whether the query should return full entities or only keys.
      cursor: A compiled query from which to resume.
    """
    super(Query, self).__init__(model_class, keys_only, cursor=cursor)
    # Each dict in __query_sets is one conjunction of filters; more than one
    # dict represents a disjunction, executed via a MultiQuery.
    self.__query_sets = [{}]
    self.__orderings = []
    self.__ancestor = None

  def _get_query(self,
                 _query_class=datastore.Query,
                 _multi_query_class=datastore.MultiQuery):
    # Build one low-level query per filter set.
    queries = []
    for query_set in self.__query_sets:
      if self._model_class is not None:
        kind = self._model_class.kind()
      else:
        kind = None
      query = _query_class(kind,
                           query_set,
                           keys_only=self._keys_only,
                           compile=self._compile,
                           cursor=self._cursor)
      query.Order(*self.__orderings)
      if self.__ancestor is not None:
        query.Ancestor(self.__ancestor)
      queries.append(query)

    # 'IN' and '!=' expand into several subqueries and need a matching
    # multi-query class; detect a custom _query_class without a custom
    # multi-query counterpart (and vice versa).
    if (_query_class != datastore.Query and
        _multi_query_class == datastore.MultiQuery):
      warnings.warn(
          'Custom _query_class specified without corresponding custom'
          ' _query_multi_class. Things will break if you use queries with'
          ' the "IN" or "!=" operators.', RuntimeWarning)
      if len(queries) > 1:
        raise datastore_errors.BadArgumentError(
            'Query requires multiple subqueries to satisfy. If _query_class'
            ' is overridden, _multi_query_class must also be overridden.')
    elif (_query_class == datastore.Query and
          _multi_query_class != datastore.MultiQuery):
      raise BadArgumentError('_query_class must also be overridden if'
                             ' _multi_query_class is overridden.')

    if len(queries) == 1:
      return queries[0]
    else:
      return _multi_query_class(queries, self.__orderings)

  def __filter_disjunction(self, operations, values):
    """Add a disjunction of several filters and several values to the query.

    This is implemented by duplicating queries and combining the
    results later.

    Args:
      operations: a string or list of strings. Each string contains a
        property name and an operator to filter by. The operators
        themselves must not require multiple queries to evaluate
        (currently, this means that 'in' and '!=' are invalid).
      values: a value or list of filter values, normalized by
        _normalize_query_parameter.
    """
    if not isinstance(operations, (list, tuple)):
      operations = [operations]
    if not isinstance(values, (list, tuple)):
      values = [values]

    # The cross product of existing filter sets x operations x values
    # becomes the new set of conjunctions.
    new_query_sets = []
    for operation in operations:
      if operation.lower().endswith('in') or operation.endswith('!='):
        raise BadQueryError('Cannot use "in" or "!=" in a disjunction.')
      for query_set in self.__query_sets:
        for value in values:
          new_query_set = copy.deepcopy(query_set)
          datastore._AddOrAppend(new_query_set, operation, value)
          new_query_sets.append(new_query_set)
    self.__query_sets = new_query_sets

  def filter(self, property_operator, value):
    """Add filter to query.

    Args:
      property_operator: string with the property and operator to filter by.
      value: the filter value.

    Returns:
      Self to support method chaining.

    Raises:
      PropertyError if invalid property is provided.
    """
    # Group 1 is the property name, group 3 the (optional) operator.
    match = _FILTER_REGEX.match(property_operator)
    prop = match.group(1)
    if match.group(3) is not None:
      operator = match.group(3)
    else:
      operator = '=='

    if self._model_class is None:
      # Kindless queries may only filter on the special __key__ property.
      if prop != datastore_types._KEY_SPECIAL_PROPERTY:
        raise BadQueryError(
            'Only %s filters are allowed on kindless queries.' %
            datastore_types._KEY_SPECIAL_PROPERTY)
    elif prop in self._model_class._unindexed_properties:
      raise PropertyError('Property \'%s\' is not indexed' % prop)

    if operator.lower() == 'in':
      if self._keys_only:
        raise BadQueryError('Keys only queries do not support IN filters.')
      elif not isinstance(value, (list, tuple)):
        raise BadValueError('Argument to the "in" operator must be a list')
      values = [_normalize_query_parameter(v) for v in value]
      # IN is rewritten as a disjunction of equality filters.
      self.__filter_disjunction(prop + ' =', values)
    else:
      if isinstance(value, (list, tuple)):
        raise BadValueError('Filtering on lists is not supported')
      if operator == '!=':
        if self._keys_only:
          raise BadQueryError('Keys only queries do not support != filters.')
        # != is rewritten as (prop < value) OR (prop > value).
        self.__filter_disjunction([prop + ' <', prop + ' >'],
                                  _normalize_query_parameter(value))
      else:
        value = _normalize_query_parameter(value)
        for query_set in self.__query_sets:
          datastore._AddOrAppend(query_set, property_operator, value)
    return self

  def order(self, property):
    """Set order of query result.

    To use descending order, prepend '-' (minus) to the property
    name, e.g., '-date' rather than 'date'.

    Args:
      property: Property to sort on.

    Returns:
      Self to support method chaining.

    Raises:
      PropertyError if invalid property is provided.
    """
    if property.startswith('-'):
      property = property[1:]
      order = datastore.Query.DESCENDING
    else:
      order = datastore.Query.ASCENDING

    if self._model_class is None:
      # Kindless queries only support ascending order on __key__.
      if (property != datastore_types._KEY_SPECIAL_PROPERTY or
          order != datastore.Query.ASCENDING):
        raise BadQueryError(
            'Only %s ascending orders are supported on kindless queries' %
            datastore_types._KEY_SPECIAL_PROPERTY)
    else:
      # Expando instances may order on dynamic properties, so the declared-
      # property check only applies to non-Expando models.
      if not issubclass(self._model_class, Expando):
        if (property not in self._model_class.properties() and
            property not in datastore_types._SPECIAL_PROPERTIES):
          raise PropertyError('Invalid property name \'%s\'' % property)
      if property in self._model_class._unindexed_properties:
        raise PropertyError('Property \'%s\' is not indexed' % property)

    self.__orderings.append((property, order))
    return self

  def ancestor(self, ancestor):
    """Sets an ancestor for this query.

    This restricts the query to only return results that descend from
    a given model instance. In other words, all of the results will
    have the ancestor as their parent, or parent's parent, etc.  The
    ancestor itself is also a possible result!

    Args:
      ancestor: Model or Key (that has already been saved)

    Returns:
      Self to support method chaining.

    Raises:
      TypeError if the argument isn't a Key or Model; NotSavedError
      if it is, but isn't saved yet.
    """
    if isinstance(ancestor, datastore.Key):
      if ancestor.has_id_or_name():
        self.__ancestor = ancestor
      else:
        raise NotSavedError()
    elif isinstance(ancestor, Model):
      if ancestor.has_key():
        self.__ancestor = ancestor.key()
      else:
        raise NotSavedError()
    else:
      raise TypeError('ancestor should be Key or Model')
    return self
class GqlQuery(_BaseQuery):
  """A Query class that uses GQL query syntax instead of .filter() etc."""

  def __init__(self, query_string, *args, **kwds):
    """Constructor.

    Args:
      query_string: Properly formatted GQL query string.
      *args: Positional arguments used to bind numeric references in the query.
      **kwds: Dictionary-based arguments for named references.

    Raises:
      PropertyError if the query filters or sorts on a property that's not
      indexed.
    """
    # Imported here to break a circular dependency between db and gql.
    from google.appengine.ext import gql
    app = kwds.pop('_app', None)

    self._proto_query = gql.GQL(query_string, _app=app)
    if self._proto_query._entity is not None:
      model_class = class_for_kind(self._proto_query._entity)
    else:
      model_class = None
    super(GqlQuery, self).__init__(model_class,
                                   keys_only=self._proto_query._keys_only)
    if model_class is not None:
      # Reject filters/orders on unindexed properties up front.
      for property, unused in (self._proto_query.filters().keys() +
                               self._proto_query.orderings()):
        if property in model_class._unindexed_properties:
          raise PropertyError('Property \'%s\' is not indexed' % property)
    self.bind(*args, **kwds)

  def bind(self, *args, **kwds):
    """Bind arguments (positional or keyword) to the query.

    Note that you can also pass arguments directly to the query
    constructor.  Each time you call bind() the previous set of
    arguments is replaced with the new set.  This is useful because
    the hard work is in parsing the query; so if you expect to be
    using the same query with different sets of arguments, you should
    hold on to the GqlQuery() object and call bind() on it each time.

    Args:
      *args: Positional arguments used to bind numeric references in the query.
      **kwds: Dictionary-based arguments for named references.
    """
    self._args = []
    for arg in args:
      self._args.append(_normalize_query_parameter(arg))
    self._kwds = {}
    for name, arg in kwds.iteritems():
      self._kwds[name] = _normalize_query_parameter(arg)

  def run(self, **kwargs):
    """Iterator for this query that handles the LIMIT clause properly.

    If the GQL query string contains a LIMIT clause, this function fetches
    all results before returning an iterator. Otherwise results are retrieved
    in batches by the iterator.

    Args:
      rpc: datastore.DatastoreRPC to use for this request.

    Returns:
      Iterator for this query.
    """
    if self._proto_query.limit() >= 0:
      return iter(self.fetch(limit=self._proto_query.limit(),
                             offset=self._proto_query.offset(),
                             **kwargs))
    else:
      results = _BaseQuery.run(self, **kwargs)
      try:
        # No LIMIT: honor OFFSET by consuming that many results up front.
        for _ in xrange(self._proto_query.offset()):
          results.next()
      except StopIteration:
        pass
      return results

  def _get_query(self):
    return self._proto_query.Bind(self._args, self._kwds, self._cursor)
class UnindexedProperty(Property):
  """A property that isn't indexed by either built-in or composite indices.

  TextProperty and BlobProperty derive from this class.
  """
  def __init__(self, *args, **kwds):
    """Construct property. See the Property class for details.

    Raises:
      ConfigurationError if indexed=True.
    """
    # Reject any caller-supplied indexed=True; only indexed=False (or
    # omitted) is allowed for unindexed types.
    self._require_parameter(kwds, 'indexed', False)
    # NOTE(review): the flag is then forced to True before delegating to
    # Property.__init__ -- presumably to satisfy base-class handling of
    # these inherently-unindexed types; confirm against Property before
    # changing.
    kwds['indexed'] = True
    super(UnindexedProperty, self).__init__(*args, **kwds)
  def validate(self, value):
    """Validate property.

    Non-None values that are not already instances of data_type are
    coerced via the data_type constructor before base validation.

    Returns:
      A valid value.

    Raises:
      BadValueError if property is not an instance of data_type.
    """
    if value is not None and not isinstance(value, self.data_type):
      try:
        value = self.data_type(value)
      except TypeError, err:
        raise BadValueError('Property %s must be convertible '
                            'to a %s instance (%s)' %
                            (self.name, self.data_type.__name__, err))
    value = super(UnindexedProperty, self).validate(value)
    if value is not None and not isinstance(value, self.data_type):
      raise BadValueError('Property %s must be a %s instance' %
                          (self.name, self.data_type.__name__))
    return value
class TextProperty(UnindexedProperty):
  """A string that can be longer than 500 bytes.

  Stored unindexed; values are coerced to Text by UnindexedProperty.
  """
  data_type = Text
class StringProperty(Property):
  """A textual property, which can be multi- or single-line."""

  data_type = basestring

  def __init__(self, verbose_name=None, multiline=False, **kwds):
    """Construct string property.

    Args:
      verbose_name: Verbose name is always first parameter.
      multiline: Whether carriage returns are permitted in the value.
    """
    super(StringProperty, self).__init__(verbose_name, **kwds)
    self.multiline = multiline

  def validate(self, value):
    """Validate string property.

    Returns:
      A valid value.

    Raises:
      BadValueError if property is not multi-line but value is.
    """
    value = super(StringProperty, self).validate(value)
    if value is not None:
      if not isinstance(value, basestring):
        raise BadValueError(
            'Property %s must be a str or unicode instance, not a %s'
            % (self.name, type(value).__name__))
      if value and not self.multiline and '\n' in value:
        raise BadValueError('Property %s is not multi-line' % self.name)
    return value
class _CoercingProperty(Property):
  """A Property subclass that extends validate() to coerce to self.data_type."""

  def validate(self, value):
    """Coerce values (except None) to self.data_type.

    Args:
      value: The value to be validated and coerced.

    Returns:
      The coerced and validated value. It is guaranteed that this is
      either None or an instance of self.data_type; otherwise an exception
      is raised.

    Raises:
      BadValueError if the value could not be validated or coerced.
    """
    value = super(_CoercingProperty, self).validate(value)
    if value is None or isinstance(value, self.data_type):
      return value
    return self.data_type(value)
class CategoryProperty(_CoercingProperty):
  """A property whose values are Category instances.

  Non-Category values are coerced via _CoercingProperty.validate().
  """
  data_type = Category
class LinkProperty(_CoercingProperty):
  """A property whose values are Link instances."""

  data_type = Link

  def validate(self, value):
    """Validate that non-None values are full URLs (scheme and host set)."""
    value = super(LinkProperty, self).validate(value)
    if value is None:
      return value
    parts = urlparse.urlsplit(value)
    # parts[0] is the scheme, parts[1] the network location (host).
    if not (parts[0] and parts[1]):
      raise BadValueError('Property %s must be a full URL (\'%s\')' %
                          (self.name, value))
    return value
# Backwards-compatible alias for LinkProperty.
URLProperty = LinkProperty
class EmailProperty(_CoercingProperty):
  """A property whose values are Email instances.

  Non-Email values are coerced via _CoercingProperty.validate().
  """
  data_type = Email
class GeoPtProperty(_CoercingProperty):
  """A property whose values are GeoPt instances.

  Non-GeoPt values are coerced via _CoercingProperty.validate().
  """
  data_type = GeoPt
class IMProperty(_CoercingProperty):
  """A property whose values are IM instances.

  Non-IM values are coerced via _CoercingProperty.validate().
  """
  data_type = IM
class PhoneNumberProperty(_CoercingProperty):
  """A property whose values are PhoneNumber instances.

  Non-PhoneNumber values are coerced via _CoercingProperty.validate().
  """
  data_type = PhoneNumber
class PostalAddressProperty(_CoercingProperty):
  """A property whose values are PostalAddress instances.

  Non-PostalAddress values are coerced via _CoercingProperty.validate().
  """
  data_type = PostalAddress
class BlobProperty(UnindexedProperty):
  """A byte string that can be longer than 500 bytes.

  Stored unindexed; values are coerced to Blob by UnindexedProperty.
  """
  data_type = Blob
class ByteStringProperty(Property):
  """A short (<=500 bytes) byte string.

  This type should be used for short binary values that need to be indexed. If
  you do not require indexing (regardless of length), use BlobProperty instead.
  """
  def validate(self, value):
    """Validate ByteString property.

    Non-None values are coerced to ByteString before base validation.

    Returns:
      A valid value.

    Raises:
      BadValueError if property is not instance of 'ByteString'.
    """
    if value is not None and not isinstance(value, ByteString):
      try:
        value = ByteString(value)
      except TypeError, err:
        raise BadValueError('Property %s must be convertible '
                            'to a ByteString instance (%s)' % (self.name, err))
    value = super(ByteStringProperty, self).validate(value)
    if value is not None and not isinstance(value, ByteString):
      raise BadValueError('Property %s must be a ByteString instance'
                          % self.name)
    return value

  data_type = ByteString
class DateTimeProperty(Property):
  """The base class of all of our date/time properties.

  We handle common operations, like converting between time tuples and
  datetime instances.
  """
  def __init__(self, verbose_name=None, auto_now=False, auto_now_add=False,
               **kwds):
    """Construct a DateTimeProperty

    Args:
      verbose_name: Verbose name is always first parameter.
      auto_now: Date/time property is updated with the current time every time
        it is saved to the datastore.  Useful for properties that want to track
        the modification time of an instance.
      auto_now_add: Date/time is set to the when its instance is created.
        Useful for properties that record the creation time of an entity.
    """
    super(DateTimeProperty, self).__init__(verbose_name, **kwds)
    self.auto_now = auto_now
    self.auto_now_add = auto_now_add

  def validate(self, value):
    """Validate datetime.

    Returns:
      A valid value.

    Raises:
      BadValueError if property is not instance of 'datetime'.
    """
    value = super(DateTimeProperty, self).validate(value)
    # NOTE(review): truthiness test, not an 'is not None' test -- falsy
    # values of subclass data_types (e.g. midnight for time on older
    # Pythons) skip the isinstance check here; confirm before tightening.
    if value and not isinstance(value, self.data_type):
      raise BadValueError('Property %s must be a %s' %
                          (self.name, self.data_type.__name__))
    return value

  def default_value(self):
    """Default value for datetime.

    Returns:
      value of now() as appropriate to the date-time instance if auto_now
      or auto_now_add is set, else user configured default value implementation.
    """
    if self.auto_now or self.auto_now_add:
      return self.now()
    return Property.default_value(self)

  def get_value_for_datastore(self, model_instance):
    """Get value from property to send to datastore.

    Returns:
      now() as appropriate to the date-time instance in the odd case where
      auto_now is set to True, else the default implementation.
    """
    # auto_now overrides whatever value is stored on the instance.
    if self.auto_now:
      return self.now()
    else:
      return super(DateTimeProperty,
                   self).get_value_for_datastore(model_instance)

  data_type = datetime.datetime

  @staticmethod
  def now():
    """Get now as a full datetime value.

    Returns:
      'now' as a whole timestamp, including both time and date.
    """
    return datetime.datetime.now()
def _date_to_datetime(value):
"""Convert a date to a datetime for datastore storage.
Args:
value: A datetime.date object.
Returns:
A datetime object with time set to 0:00.
"""
assert isinstance(value, datetime.date)
return datetime.datetime(value.year, value.month, value.day)
def _time_to_datetime(value):
"""Convert a time to a datetime for datastore storage.
Args:
value: A datetime.time object.
Returns:
A datetime object with date set to 1970-01-01.
"""
assert isinstance(value, datetime.time)
return datetime.datetime(1970, 1, 1,
value.hour, value.minute, value.second,
value.microsecond)
class DateProperty(DateTimeProperty):
  """A date property, which stores a date without a time."""

  data_type = datetime.date

  @staticmethod
  def now():
    """Get now as a date datetime value.

    Returns:
      'date' part of 'now' only.
    """
    return datetime.datetime.now().date()

  def validate(self, value):
    """Validate date.

    Returns:
      A valid value.

    Raises:
      BadValueError if property is not instance of 'date',
      or if it is an instance of 'datetime' (which is a subclass
      of 'date', but for all practical purposes a different type).
    """
    value = super(DateProperty, self).validate(value)
    if isinstance(value, datetime.datetime):
      raise BadValueError('Property %s must be a %s, not a datetime' %
                          (self.name, self.data_type.__name__))
    return value

  def get_value_for_datastore(self, model_instance):
    """Get value from property to send to datastore.

    The datastore has no date-only type, so the date is widened to a
    datetime with the time component zeroed.
    """
    value = super(DateProperty, self).get_value_for_datastore(model_instance)
    if value is None:
      return None
    assert isinstance(value, datetime.date)
    return _date_to_datetime(value)

  def make_value_from_datastore(self, value):
    """Native representation of this property.

    The stored datetime is narrowed back to its date portion.
    """
    if value is None:
      return None
    assert isinstance(value, datetime.datetime)
    return value.date()
class TimeProperty(DateTimeProperty):
  """A time property, which stores a time without a date."""

  data_type = datetime.time

  @staticmethod
  def now():
    """Get now as a time datetime value.

    Returns:
      'time' part of 'now' only.
    """
    return datetime.datetime.now().time()

  def empty(self, value):
    """Is time property empty.

    "0:0" (midnight) is not an empty value.

    Returns:
      True if value is None, else False.
    """
    return value is None

  def get_value_for_datastore(self, model_instance):
    """Get value from property to send to datastore.

    The datastore has no time-only type, so the time is widened to a
    datetime anchored on 1970-01-01.
    """
    value = super(TimeProperty, self).get_value_for_datastore(model_instance)
    if value is None:
      return None
    assert isinstance(value, datetime.time), repr(value)
    return _time_to_datetime(value)

  def make_value_from_datastore(self, value):
    """Native representation of this property.

    The stored datetime is narrowed back to its time portion.
    """
    if value is None:
      return None
    assert isinstance(value, datetime.datetime)
    return value.time()
class IntegerProperty(Property):
    """An integer property constrained to the datastore's signed 64-bit range."""

    def validate(self, value):
        """Validate an integer value.

        bool is a subclass of int, so it has to be rejected explicitly.

        Returns:
            The validated value (or None).

        Raises:
            BadValueError: if the value is not an int/long, or does not fit
                in a signed 64-bit integer.
        """
        value = super(IntegerProperty, self).validate(value)
        if value is None:
            return None
        if isinstance(value, bool) or not isinstance(value, (int, long)):
            raise BadValueError('Property %s must be an int or long, not a %s'
                                % (self.name, type(value).__name__))
        if not (-0x8000000000000000 <= value <= 0x7fffffffffffffff):
            raise BadValueError('Property %s must fit in 64 bits' % self.name)
        return value

    data_type = int

    def empty(self, value):
        """Report whether the property is unset; 0 is a real value.

        Returns:
            True if value is None, else False.
        """
        return value is None
class RatingProperty(_CoercingProperty, IntegerProperty):
    """A property whose values are Rating instances.

    _CoercingProperty coerces assigned values to data_type; IntegerProperty
    contributes the integer range validation.
    """
    data_type = Rating
class FloatProperty(Property):
    """A property holding a Python float."""

    def validate(self, value):
        """Validate a float value.

        Returns:
            The validated value.

        Raises:
            BadValueError: if a non-None value is not a float instance.
        """
        checked = super(FloatProperty, self).validate(value)
        if checked is None or isinstance(checked, float):
            return checked
        raise BadValueError('Property %s must be a float' % self.name)

    data_type = float

    def empty(self, value):
        """Report whether the property is unset; 0.0 is a real value.

        Returns:
            True if value is None, else False.
        """
        return value is None
class BooleanProperty(Property):
    """A property holding True or False."""

    def validate(self, value):
        """Validate a boolean value.

        Returns:
            The validated value.

        Raises:
            BadValueError: if a non-None value is not a bool instance.
        """
        checked = super(BooleanProperty, self).validate(value)
        if checked is None or isinstance(checked, bool):
            return checked
        raise BadValueError('Property %s must be a bool' % self.name)

    data_type = bool

    def empty(self, value):
        """Report whether the property is unset; False is a real value.

        Returns:
            True if value is None, else False.
        """
        return value is None
class UserProperty(Property):
    """A property holding a users.User value.

    Supports automatically stamping the current user at write time, which
    replaces the usual 'default' mechanism for this property type.
    """

    def __init__(self,
                 verbose_name=None,
                 name=None,
                 required=False,
                 validator=None,
                 choices=None,
                 auto_current_user=False,
                 auto_current_user_add=False,
                 indexed=True):
        """Initialize this property.

        Note: the 'default' keyword argument is deliberately unsupported;
        use auto_current_user_add=True instead.

        Args:
            verbose_name: User friendly name of property.
            name: Storage name for property; defaults to the attribute name
                as assigned in the Model subclass.
            required: Whether property is required.
            validator: User provided validation callable.
            choices: User provided set of valid property values.
            auto_current_user: If true, set to the current user on every
                datastore write.
            auto_current_user_add: If true, set to the current user only the
                first time the entity is written to the datastore.
            indexed: Whether property is indexed.
        """
        super(UserProperty, self).__init__(verbose_name, name,
                                           required=required,
                                           validator=validator,
                                           choices=choices,
                                           indexed=indexed)
        self.auto_current_user = auto_current_user
        self.auto_current_user_add = auto_current_user_add

    def validate(self, value):
        """Validate that the value is a users.User instance (or None).

        Returns:
            The validated value.

        Raises:
            BadValueError: if a non-None value is not a users.User.
        """
        checked = super(UserProperty, self).validate(value)
        if checked is not None and not isinstance(checked, users.User):
            raise BadValueError('Property %s must be a User' % self.name)
        return checked

    def default_value(self):
        """Return the current user when auto-stamping is enabled, else None.

        Deliberately bypasses the base implementation, because the 'default'
        keyword is not supported for this property type.
        """
        if self.auto_current_user or self.auto_current_user_add:
            return users.get_current_user()
        return None

    def get_value_for_datastore(self, model_instance):
        """Return the value to store, re-stamping when auto_current_user is set."""
        if self.auto_current_user:
            return users.get_current_user()
        return super(UserProperty, self).get_value_for_datastore(model_instance)

    data_type = users.User
class ListProperty(Property):
    """A property that stores a list of things.

    This is a parameterized property; the parameter must be a valid
    non-list data type, and all items must conform to this type.
    """

    def __init__(self, item_type, verbose_name=None, default=None, **kwds):
        """Construct ListProperty.

        Args:
            item_type: Type for the list items; must be one of the allowed
                property types.
            verbose_name: Optional verbose name.
            default: Optional default value; if omitted, an empty list is used.
            **kwds: Optional additional keyword arguments, passed to base class.
                Note that the only permissible value for 'required' is True.

        Raises:
            TypeError: if item_type is not a type object.
            ValueError: if item_type is not an allowed property type.
        """
        # str is widened to basestring so both str and unicode items validate.
        if item_type is str:
            item_type = basestring
        if not isinstance(item_type, type):
            raise TypeError('Item type should be a type object')
        if item_type not in _ALLOWED_PROPERTY_TYPES:
            raise ValueError('Item type %s is not acceptable' % item_type.__name__)
        if issubclass(item_type, (Blob, Text)):
            # Callers may not request indexing for Blob/Text items, yet the
            # property is stored with indexed=True afterwards.
            # NOTE(review): this mirrors the original SDK behavior exactly;
            # confirm the intent before changing either line.
            self._require_parameter(kwds, 'indexed', False)
            kwds['indexed'] = True
        # A list property is always "required": an absent list is represented
        # by the empty list rather than by None.
        self._require_parameter(kwds, 'required', True)
        if default is None:
            default = []
        self.item_type = item_type
        super(ListProperty, self).__init__(verbose_name,
                                           default=default,
                                           **kwds)

    def validate(self, value):
        """Validate list.

        Returns:
            A valid value.

        Raises:
            BadValueError if property is not a list whose items are instances of
            the item_type given to the constructor.
        """
        value = super(ListProperty, self).validate(value)
        if value is not None:
            if not isinstance(value, list):
                raise BadValueError('Property %s must be a list' % self.name)
            value = self.validate_list_contents(value)
        return value

    def validate_list_contents(self, value):
        """Validates that all items in the list are of the correct type.

        Returns:
            The validated list.

        Raises:
            BadValueError if the list has items that are not instances of the
            item_type given to the constructor.
        """
        # int and long are interchangeable for integer lists.
        if self.item_type in (int, long):
            item_type = (int, long)
        else:
            item_type = self.item_type
        for item in value:
            if not isinstance(item, item_type):
                if item_type == (int, long):
                    raise BadValueError('Items in the %s list must all be integers.' %
                                        self.name)
                else:
                    raise BadValueError(
                        'Items in the %s list must all be %s instances' %
                        (self.name, self.item_type.__name__))
        return value

    def empty(self, value):
        """Is list property empty.

        [] is not an empty value; only None is.

        Returns:
            True if value is None, else false.
        """
        return value is None

    data_type = list

    def default_value(self):
        """Default value for list.

        Because the property supplied to 'default' is a static value,
        that value must be shallow copied to prevent all fields with
        default values from sharing the same instance.

        Returns:
            Copy of the default value.
        """
        return list(super(ListProperty, self).default_value())

    def get_value_for_datastore(self, model_instance):
        """Get value from property to send to datastore.

        Re-validates contents (and re-runs any user validator) at save time,
        since the list may have been mutated in place after assignment.

        Returns:
            validated list appropriate to save in the datastore.
        """
        value = self.validate_list_contents(
            super(ListProperty, self).get_value_for_datastore(model_instance))
        if self.validator:
            self.validator(value)
        return value
class StringListProperty(ListProperty):
    """A ListProperty specialized to lists of strings.

    Shorthand for the most common ListProperty parameterization.
    """

    def __init__(self, verbose_name=None, default=None, **kwds):
        """Construct StringListProperty.

        Args:
            verbose_name: Optional verbose name.
            default: Optional default value; an empty list when omitted.
            **kwds: Additional keyword arguments forwarded to ListProperty().
        """
        super(StringListProperty, self).__init__(
            basestring, verbose_name=verbose_name, default=default, **kwds)
class ReferenceProperty(Property):
    """A property that represents a many-to-one reference to another model.

    For example, a reference property in model A that refers to model B forms
    a many-to-one relationship from A to B: every instance of A refers to a
    single B instance, and every B instance can have many A instances refer
    to it.
    """

    def __init__(self,
                 reference_class=None,
                 verbose_name=None,
                 collection_name=None,
                 **attrs):
        """Construct ReferenceProperty.

        Args:
            reference_class: Which model class this property references.
            verbose_name: User friendly name of property.
            collection_name: If provided, alternate name of collection on
                reference_class to store back references.  Use this to allow
                a Model to have multiple fields which refer to the same class.

        Raises:
            KindError: if reference_class is neither a Model subclass nor the
                _SELF_REFERENCE sentinel.
        """
        super(ReferenceProperty, self).__init__(verbose_name, **attrs)
        self.collection_name = collection_name
        if reference_class is None:
            reference_class = Model
        if not ((isinstance(reference_class, type) and
                 issubclass(reference_class, Model)) or
                reference_class is _SELF_REFERENCE):
            raise KindError('reference_class must be Model or _SELF_REFERENCE')
        self.reference_class = self.data_type = reference_class

    def __property_config__(self, model_class, property_name):
        """Loads all of the references that point to this model.

        We need to do this to create the ReverseReferenceProperty properties for
        this model and create the <reference>_set attributes on the referenced
        model, e.g.:

            class Story(db.Model):
                title = db.StringProperty()
            class Comment(db.Model):
                story = db.ReferenceProperty(Story)
            story = Story.get(id)
            print [c for c in story.comment_set]

        In this example, the comment_set property was created based on the
        reference from Comment to Story (which is inherently one to many).

        Args:
            model_class: Model class which will have its reference properties
                initialized.
            property_name: Name of property being configured.

        Raises:
            DuplicatePropertyError if referenced class already has the provided
            collection name as a property.
        """
        super(ReferenceProperty, self).__property_config__(model_class,
                                                           property_name)
        # A self-reference can only be resolved here, once the concrete model
        # class object actually exists.
        if self.reference_class is _SELF_REFERENCE:
            self.reference_class = self.data_type = model_class
        if self.collection_name is None:
            self.collection_name = '%s_set' % (model_class.__name__.lower())
        existing_prop = getattr(self.reference_class, self.collection_name, None)
        if existing_prop is not None:
            # Re-configuration with an identical back-reference (same property
            # name, same model identity by name and module) is tolerated;
            # anything else is a genuine name collision.
            if not (isinstance(existing_prop, _ReverseReferenceProperty) and
                    existing_prop._prop_name == property_name and
                    existing_prop._model.__name__ == model_class.__name__ and
                    existing_prop._model.__module__ == model_class.__module__):
                raise DuplicatePropertyError('Class %s already has property %s '
                                             % (self.reference_class.__name__,
                                                self.collection_name))
        setattr(self.reference_class,
                self.collection_name,
                _ReverseReferenceProperty(model_class, property_name))

    def __get__(self, model_instance, model_class):
        """Get reference object (descriptor protocol).

        This method will fetch unresolved entities from the datastore if
        they are not already loaded.

        Returns:
            ReferenceProperty to Model object if property is set, else None.

        Raises:
            Error: if the stored key no longer resolves to an entity.
        """
        if model_instance is None:
            # Accessed on the class itself: return the descriptor.
            return self
        if hasattr(model_instance, self.__id_attr_name()):
            reference_id = getattr(model_instance, self.__id_attr_name())
        else:
            reference_id = None
        if reference_id is not None:
            resolved = getattr(model_instance, self.__resolved_attr_name())
            if resolved is not None:
                return resolved
            else:
                # Lazy load: fetch once, then memoize on the instance.
                instance = get(reference_id)
                if instance is None:
                    raise Error('ReferenceProperty failed to be resolved')
                setattr(model_instance, self.__resolved_attr_name(), instance)
                return instance
        else:
            return None

    def __set__(self, model_instance, value):
        """Set reference.  Accepts either a datastore.Key or a model instance."""
        value = self.validate(value)
        if value is not None:
            if isinstance(value, datastore.Key):
                # Key only: discard any previously resolved instance so the
                # next read re-fetches.
                setattr(model_instance, self.__id_attr_name(), value)
                setattr(model_instance, self.__resolved_attr_name(), None)
            else:
                setattr(model_instance, self.__id_attr_name(), value.key())
                setattr(model_instance, self.__resolved_attr_name(), value)
        else:
            setattr(model_instance, self.__id_attr_name(), None)
            setattr(model_instance, self.__resolved_attr_name(), None)

    def get_value_for_datastore(self, model_instance):
        """Get key of reference rather than reference itself."""
        return getattr(model_instance, self.__id_attr_name())

    def validate(self, value):
        """Validate reference.

        Returns:
            A valid value.

        Raises:
            BadValueError for the following reasons:
                - Value is not saved.
                - Object not of correct model type for reference.
        """
        # A raw datastore Key is accepted as-is, bypassing the base checks.
        if isinstance(value, datastore.Key):
            return value
        if value is not None and not value.has_key():
            raise BadValueError(
                '%s instance must have a complete key before it can be stored as a '
                'reference' % self.reference_class.kind())
        value = super(ReferenceProperty, self).validate(value)
        if value is not None and not isinstance(value, self.reference_class):
            raise KindError('Property %s must be an instance of %s' %
                            (self.name, self.reference_class.kind()))
        return value

    def __id_attr_name(self):
        """Get attribute of referenced id.

        Returns:
            Attribute where to store id of referenced entity.
        """
        return self._attr_name()

    def __resolved_attr_name(self):
        """Get attribute of resolved attribute.

        The resolved attribute is where the actual loaded reference instance is
        stored on the referring model instance.

        Returns:
            Attribute name of where to store resolved reference model instance.
        """
        return '_RESOLVED' + self._attr_name()
Reference = ReferenceProperty
def SelfReferenceProperty(verbose_name=None, collection_name=None, **attrs):
    """Declare a ReferenceProperty that points back at its own model class.

    Example:
        class HtmlNode(db.Model):
            parent = db.SelfReferenceProperty('Parent', 'children')

    Args:
        verbose_name: User friendly name of property.
        collection_name: Name of the back-reference collection on the model.

    Raises:
        ConfigurationError: if a reference_class keyword is supplied; the
            reference class is implicitly the declaring model itself.
    """
    if 'reference_class' in attrs:
        raise ConfigurationError(
            'Do not provide reference_class to self-reference.')
    return ReferenceProperty(
        _SELF_REFERENCE, verbose_name, collection_name, **attrs)
SelfReference = SelfReferenceProperty
class _ReverseReferenceProperty(Property):
    """The inverse of the Reference property above.

    We construct reverse references automatically for the model to which
    the Reference property is pointing to create the one-to-many property for
    that model.  For example, if you put a Reference property in model A that
    refers to model B, we automatically create a _ReverseReference property in
    B called a_set that can fetch all of the model A instances that refer to
    that instance of model B.
    """

    def __init__(self, model, prop):
        """Constructor for reverse reference.

        Constructor does not take standard values of other property types.

        Args:
            model: Model class that this property is a collection of.
            prop: Name of foreign property on referred model that points back
                to this property's entity.
        """
        # Double-underscore attributes are name-mangled on purpose so model
        # code cannot collide with them; read-only access is exposed through
        # the _model/_prop_name properties below.
        self.__model = model
        self.__property = prop

    @property
    def _model(self):
        """Internal helper to access the model class, read-only."""
        return self.__model

    @property
    def _prop_name(self):
        """Internal helper to access the property name, read-only."""
        return self.__property

    def __get__(self, model_instance, model_class):
        """Fetches collection of model instances of this collection property."""
        if model_instance is not None:
            # Instance access: build a query for every entity whose foreign
            # property equals this instance's key.
            query = Query(self.__model)
            return query.filter(self.__property + ' =', model_instance.key())
        else:
            # Class access: return the descriptor itself.
            return self

    def __set__(self, model_instance, value):
        """Not possible to set a new collection."""
        raise BadValueError('Virtual property is read-only')
# Transaction helpers re-exported from the datastore module, in both
# lower_case and CamelCase spellings for backward compatibility.
run_in_transaction = datastore.RunInTransaction
run_in_transaction_custom_retries = datastore.RunInTransactionCustomRetries
RunInTransaction = run_in_transaction
RunInTransactionCustomRetries = run_in_transaction_custom_retries
|
from django.db import migrations
class Migration(migrations.Migration):
    """Drop four legacy columns from the StockItemTracking model."""

    dependencies = [
        ('stock', '0062_auto_20210511_2151'),
    ]

    # One RemoveField per retired column, in the original order.
    operations = [
        migrations.RemoveField(model_name='stockitemtracking', name=column)
        for column in ('link', 'quantity', 'system', 'title')
    ]
|
def linear_search(lst, size, value):
    """Scan the first `size` elements of lst for value.

    Args:
        lst: sequence to search.
        size: number of leading elements to examine.
        value: item to look for.

    Returns:
        Index of the first match, or -1 if value is absent.
    """
    for position in range(size):
        if lst[position] == value:
            return position
    return -1
def main():
    """Demo driver: read an integer from stdin and linearly search a fixed list."""
    lst = [-31, 0, 1, 2, 2, 4, 65, 83, 99, 782]
    size = len(lst)
    original_list = ""
    # Prompt for the search target; raises ValueError on non-integer input.
    value = int(input("\nInput a value to search for: "))
    print("\nOriginal Array: ")
    # Build a space-separated rendering of the list for display.
    for i in lst:
        original_list += str(i) + " "
    print(original_list)
    print("\nLinear Search Big O Notation:\n--> Best Case: O(1)\n--> Average Case: O(n)\n--> Worst Case: O(n)\n")
    index = linear_search(lst,size,value)
    if index == -1:
        print(str(value) + " was not found in that array\n")
    else:
        print(str(value) + " was found at index " + str(index))


if __name__ == '__main__':
    main()
|
import os
import cv2
import numpy as np
from . import print_image
from . import plot_image
from . import fatal_error
from . import plot_colorbar
def _pseudocolored_image(device, histogram, bins, img, mask, background, channel, filename, resolution,
                         analysis_images, debug):
    """Pseudocolor image.

    Inputs:
    histogram       = a normalized histogram of color values from one color channel
    bins            = number of color bins the channel is divided into
    img             = input image
    mask            = binary mask image
    background      = what background image?: channel image (img) or white
    channel         = color channel name
    filename        = input image filename
    resolution      = output image resolution
                      (NOTE(review): not referenced anywhere in this body --
                      confirm whether it should be forwarded to print_image)
    analysis_images = list of analysis image filenames
    debug           = print or plot. Print = save to file, Plot = print to screen.

    Returns:
    analysis_images = list of analysis image filenames

    :param histogram: list
    :param bins: int
    :param img: numpy array
    :param mask: numpy array
    :param background: str
    :param channel: str
    :param filename: str
    :param resolution: int
    :param analysis_images: list
    :return analysis_images: list
    """
    mask_inv = cv2.bitwise_not(mask)
    # colormap=2 selects OpenCV's COLORMAP_JET.
    cplant = cv2.applyColorMap(histogram, colormap=2)
    # Keep the colormapped values only where the mask (plant) is set.
    cplant1 = cv2.bitwise_and(cplant, cplant, mask=mask)
    output_imgs = {"pseudo_on_img": {"background": "img", "img": None},
                   "pseudo_on_white": {"background": "white", "img": None}}
    if background == 'img' or background == 'both':
        # mask the background and color the plant with color scheme 'jet'
        img_gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
        img_back = cv2.bitwise_and(img_gray, img_gray, mask=mask_inv)
        img_back3 = np.dstack((img_back, img_back, img_back))
        output_imgs["pseudo_on_img"]["img"] = cv2.add(cplant1, img_back3)
    if background == 'white' or background == 'both':
        # Get the image size
        if np.shape(img)[2] == 3:
            ix, iy, iz = np.shape(img)
        else:
            ix, iy = np.shape(img)
        size = ix, iy
        # Build an all-white 3-channel background restricted to the non-plant area.
        back = np.zeros(size, dtype=np.uint8)
        w_back = back + 255
        w_back3 = np.dstack((w_back, w_back, w_back))
        img_back3 = cv2.bitwise_and(w_back3, w_back3, mask=mask_inv)
        output_imgs["pseudo_on_white"]["img"] = cv2.add(cplant1, img_back3)
    if filename:
        # Save each generated composite next to the input image.
        for key in output_imgs:
            if output_imgs[key]["img"] is not None:
                fig_name_pseudo = str(filename[0:-4]) + '_' + str(channel) + '_pseudo_on_' + \
                                  output_imgs[key]["background"] + '.jpg'
                path = os.path.dirname(filename)
                print_image(output_imgs[key]["img"], fig_name_pseudo)
                analysis_images.append(['IMAGE', 'pseudo', fig_name_pseudo])
    else:
        path = "."
    if debug is not None:
        if debug == 'print':
            for key in output_imgs:
                if output_imgs[key]["img"] is not None:
                    print_image(output_imgs[key]["img"], (str(device) + "_" + output_imgs[key]["background"] +
                                                          '_pseudocolor.jpg'))
            fig_name = 'VIS_pseudocolor_colorbar_' + str(channel) + '_channel.svg'
            # Write the colorbar legend only once per output directory.
            if not os.path.isfile(os.path.join(path, fig_name)):
                plot_colorbar(path, fig_name, bins)
        elif debug == 'plot':
            for key in output_imgs:
                if output_imgs[key]["img"] is not None:
                    plot_image(output_imgs[key]["img"])
    return analysis_images
def analyze_color(img, imgname, mask, bins, device, debug=None, hist_plot_type=None, pseudo_channel='v',
                  pseudo_bkg='img', resolution=300, filename=False):
    """Analyze the color properties of an image object.

    Inputs:
    img             = image
    imgname         = name of input image (NOTE(review): not referenced in
                      this body -- kept for interface compatibility)
    mask            = mask made from selected contours
    bins            = number of color bins per channel
    device          = device number. Used to count steps in the pipeline
    debug           = None, print, or plot. Print = save to file, Plot = print to screen.
    hist_plot_type  = 'None', 'all', 'rgb', 'lab' or 'hsv'
    pseudo_channel  = 'None', 'l', 'm' (green-magenta), 'y' (blue-yellow), 'h', 's', or 'v';
                      creates a pseudocolored image based on the specified channel
    pseudo_bkg      = 'img' => channel image, 'white' => white background image,
                      'both' => both img and white options
    resolution      = output image resolution
    filename        = False or image name. If defined print image

    Returns:
    device          = device number
    hist_header     = color histogram data table headers
    hist_data       = color histogram data table values
    analysis_images = list of output images

    :param img: numpy array
    :param imgname: str
    :param mask: numpy array
    :param bins: int
    :param device: int
    :param debug: str
    :param hist_plot_type: str
    :param pseudo_channel: str
    :param pseudo_bkg: str
    :param resolution: int
    :param filename: str
    :return device: int
    :return hist_header: list
    :return hist_data: list
    :return analysis_images: list
    """
    device += 1
    # Keep only the masked (plant) pixels, then split into the nine channels
    # of the BGR, LAB and HSV color spaces.
    masked = cv2.bitwise_and(img, img, mask=mask)
    b, g, r = cv2.split(masked)
    lab = cv2.cvtColor(masked, cv2.COLOR_BGR2LAB)
    l, m, y = cv2.split(lab)
    hsv = cv2.cvtColor(masked, cv2.COLOR_BGR2HSV)
    h, s, v = cv2.split(hsv)
    # Color channel dictionary: rescale 0-255 channel values into `bins` buckets.
    # NOTE(review): relies on 256/bins behaving as integer division (Python 2
    # semantics); under Python 3 this produces float arrays -- verify before
    # porting, since cv2.calcHist below expects uint8/float32 input.
    norm_channels = {"b": b / (256 / bins),
                     "g": g / (256 / bins),
                     "r": r / (256 / bins),
                     "l": l / (256 / bins),
                     "m": m / (256 / bins),
                     "y": y / (256 / bins),
                     "h": h / (256 / bins),
                     "s": s / (256 / bins),
                     "v": v / (256 / bins)
                     }
    # Histogram plot types
    hist_types = {"all": ("b", "g", "r", "l", "m", "y", "h", "s", "v"),
                  "rgb": ("b", "g", "r"),
                  "lab": ("l", "m", "y"),
                  "hsv": ("h", "s", "v")}
    # If the user-input pseudo_channel is not None and is not found in the list of accepted channels, exit
    if pseudo_channel is not None and pseudo_channel not in norm_channels:
        fatal_error("Pseudocolor channel was " + str(pseudo_channel) +
                    ', but can only be one of the following: None, "l", "m", "y", "h", "s" or "v"!')
    # If the user-input pseudocolored image background is not in the accepted input list, exit
    if pseudo_bkg not in ["white", "img", "both"]:
        fatal_error("The pseudocolored image background was " + str(pseudo_bkg) +
                    ', but can only be one of the following: "white", "img", or "both"!')
    # If the user-input histogram color-channel plot type is not in the list of accepted channels, exit
    if hist_plot_type is not None and hist_plot_type not in hist_types:
        fatal_error("The histogram plot type was " + str(hist_plot_type) +
                    ', but can only be one of the following: None, "all", "rgb", "lab", or "hsv"!')
    # One masked histogram per channel; the plotting color is the matplotlib
    # color name used when the channel is drawn below.
    histograms = {
        "b": {"label": "blue", "graph_color": "blue",
              "hist": cv2.calcHist([norm_channels["b"]], [0], mask, [bins], [0, (bins - 1)])},
        "g": {"label": "green", "graph_color": "forestgreen",
              "hist": cv2.calcHist([norm_channels["g"]], [0], mask, [bins], [0, (bins - 1)])},
        "r": {"label": "red", "graph_color": "red",
              "hist": cv2.calcHist([norm_channels["r"]], [0], mask, [bins], [0, (bins - 1)])},
        "l": {"label": "lightness", "graph_color": "dimgray",
              "hist": cv2.calcHist([norm_channels["l"]], [0], mask, [bins], [0, (bins - 1)])},
        "m": {"label": "green-magenta", "graph_color": "magenta",
              "hist": cv2.calcHist([norm_channels["m"]], [0], mask, [bins], [0, (bins - 1)])},
        "y": {"label": "blue-yellow", "graph_color": "yellow",
              "hist": cv2.calcHist([norm_channels["y"]], [0], mask, [bins], [0, (bins - 1)])},
        "h": {"label": "hue", "graph_color": "blueviolet",
              "hist": cv2.calcHist([norm_channels["h"]], [0], mask, [bins], [0, (bins - 1)])},
        "s": {"label": "saturation", "graph_color": "cyan",
              "hist": cv2.calcHist([norm_channels["s"]], [0], mask, [bins], [0, (bins - 1)])},
        "v": {"label": "value", "graph_color": "orange",
              "hist": cv2.calcHist([norm_channels["v"]], [0], mask, [bins], [0, (bins - 1)])}
    }
    # Flatten each (bins, 1) histogram into a plain list of counts.
    hist_data_b = [l[0] for l in histograms["b"]["hist"]]
    hist_data_g = [l[0] for l in histograms["g"]["hist"]]
    hist_data_r = [l[0] for l in histograms["r"]["hist"]]
    hist_data_l = [l[0] for l in histograms["l"]["hist"]]
    hist_data_m = [l[0] for l in histograms["m"]["hist"]]
    hist_data_y = [l[0] for l in histograms["y"]["hist"]]
    hist_data_h = [l[0] for l in histograms["h"]["hist"]]
    hist_data_s = [l[0] for l in histograms["s"]["hist"]]
    hist_data_v = [l[0] for l in histograms["v"]["hist"]]
    binval = np.arange(0, bins)
    bin_values = [l for l in binval]
    # Store Color Histogram Data
    hist_header = [
        'HEADER_HISTOGRAM',
        'bin-number',
        'bin-values',
        'blue',
        'green',
        'red',
        'lightness',
        'green-magenta',
        'blue-yellow',
        'hue',
        'saturation',
        'value'
    ]
    hist_data = [
        'HISTOGRAM_DATA',
        bins,
        bin_values,
        hist_data_b,
        hist_data_g,
        hist_data_r,
        hist_data_l,
        hist_data_m,
        hist_data_y,
        hist_data_h,
        hist_data_s,
        hist_data_v
    ]
    analysis_images = []
    if pseudo_channel is not None:
        analysis_images = _pseudocolored_image(device, norm_channels[pseudo_channel], bins, img, mask, pseudo_bkg,
                                               pseudo_channel, filename, resolution, analysis_images, debug)
    if hist_plot_type is not None and filename:
        # Deferred import with the non-interactive Agg backend so plotting
        # works in headless pipeline runs.
        import matplotlib
        matplotlib.use('Agg')
        from matplotlib import pyplot as plt
        # Create Histogram Plot
        for channel in hist_types[hist_plot_type]:
            plt.plot(histograms[channel]["hist"], color=histograms[channel]["graph_color"],
                     label=histograms[channel]["label"])
        plt.xlim([0, bins - 1])
        plt.legend()
        # Print plot
        fig_name = (str(filename[0:-4]) + '_' + str(hist_plot_type) + '_hist.svg')
        plt.savefig(fig_name)
        analysis_images.append(['IMAGE', 'hist', fig_name])
        if debug == 'print':
            fig_name = (str(device) + '_' + str(hist_plot_type) + '_hist.svg')
            plt.savefig(fig_name)
        # Clear the figure so later calls do not accumulate curves.
        plt.clf()
    return device, hist_header, hist_data, analysis_images
|
class Solution:
    # @param {integer[]} height
    # @return {integer}
    def largestRectangleArea(self, height):
        """Return the area of the largest rectangle fitting under a histogram.

        Monotonic-stack algorithm, O(n) time and O(n) space: the stack holds
        indices of bars in non-decreasing height order, with -1 as a sentinel
        marking the left boundary for width computation.

        Args:
            height: list of non-negative bar heights.

        Returns:
            Largest rectangular area (0 for an empty list).
        """
        n = len(height)
        best = 0
        stack = [-1]  # sentinel left boundary
        # Fix: `range` replaces the Python 2-only `xrange`, which raises
        # NameError under Python 3.
        for i in range(n):
            # Pop every bar strictly taller than the current one; each popped
            # bar's maximal rectangle is bounded on the right by i and on the
            # left by the new stack top.
            while stack[-1] > -1 and height[i] < height[stack[-1]]:
                top = stack.pop()
                best = max(best, height[top] * (i - 1 - stack[-1]))
            stack.append(i)
        # Bars still on the stack extend to the histogram's right edge.
        while stack[-1] != -1:
            top = stack.pop()
            best = max(best, height[top] * (n - 1 - stack[-1]))
        return best
|
'''
Created on Jan 18, 2010
@author: Paul
'''
from SQLEng import SQLEng
class PduSender(object):
    '''
    Sends SMS PDUs via Gammu-smsd.

    Rather than shelling out to the gammu command line (which makes smsd
    stop for a while), this class inserts a record into the MySQL sent box;
    Gammu-smsd then picks the record up and transmits it.
    '''

    def get_mesg(self, byte_array):
        """Render a byte sequence as a lowercase hex string, two digits per byte.

        Fix: replaces the original hex()/lstrip('0x') scheme, which relied on
        lstrip's character-set semantics and mis-rendered Python 2 `long`
        values (hex() appends a trailing 'L' that lstrip leaves in place).
        format(byte, '02x') zero-pads each value 0-255 correctly.

        Args:
            byte_array: iterable of integers in the range 0-255.

        Returns:
            Concatenated hex string, e.g. [0, 171] -> '00ab'.
        """
        return "".join(format(byte, "02x") for byte in byte_array)

    def send(self, to, byte_array):
        """Queue an SMS for `to` by inserting its hex PDU into the sent box."""
        sEng = SQLEng()
        sEng.exeSQL(sEng.getInsetSentBox(to, self.get_mesg(byte_array)))

    def __init__(self):
        '''
        Constructor (no state to initialize).
        '''
        pass
|
from __future__ import absolute_import
from .cell import Cell, WriteOnlyCell
from .read_only import ReadOnlyCell
|
# Example usage of the third-party Robinhood trading API wrapper.
from Robinhood import Robinhood

# WARNING: placeholder credentials; construction logs in immediately.
my_trader = Robinhood(username="YOUR_USERNAME", password="YOUR_PASSWORD");
#Note: Sometimes more than one instrument may be returned for a given stock symbol
stock_instrument = my_trader.instruments("GEVO")[0]
my_trader.print_quote("AAPL")
# NOTE(review): called without a symbol -- verify print_quote has a default
# argument; otherwise this raises TypeError at runtime.
my_trader.print_quote();
my_trader.print_quotes(stocks=["BBRY", "FB", "MSFT"])
quote_info = my_trader.quote_data("GEVO")
print(quote_info);
# These place real one-share orders against the authenticated account.
buy_order = my_trader.place_buy_order(stock_instrument, 1)
sell_order = my_trader.place_sell_order(stock_instrument, 1)
|
from multiprocessing import Process, Pool
import os, time
def proc(name):
    """Worker body: log start (name, pid, ppid), idle three seconds, log end."""
    start_msg = 'child process(name: %s) id %s. ppid %s' % (name, os.getpid(), os.getppid())
    print(time.asctime(), start_msg)
    time.sleep(3)
    print(time.asctime(), 'child process end')
if __name__ == '__main__':
    # Demo 1: a single explicit Process, started and joined synchronously.
    p = Process(target = proc, args = ('child',))
    print(time.asctime(), 'child process will start')
    p.start()
    p.join()
    print('first child process end')
    # Demo 2: a fixed-size pool of four workers; close() stops new
    # submissions, join() then waits for the four async tasks to finish.
    pl = Pool(4)
    for index in range(4):
        pl.apply_async(proc, args = (index,))
    pl.close()
    pl.join()
    print(time.asctime(), 'parent process end')
|
dimensions(8,8)
wall((2,0),(2,4))
wall((2,4),(4,4))
wall((2,6),(6,6))
wall((6,6),(6,0))
wall((6,2),(4,2))
initialRobotLoc(1.0, 1.0)
|
from __future__ import unicode_literals
import unittest
class TestBlogSettings(unittest.TestCase):
    """Placeholder test case for blog settings; no tests implemented yet."""
    pass
|
import sys,os
import textwrap
def print_header():
    """Write the fixed VCFv4.1 header (meta lines plus column header) to stdout.

    Fix: uses print as a function call so the script runs under Python 3,
    where the original `print <expr>` statement is a SyntaxError; a single
    parenthesized argument is equally valid Python 2 print-statement syntax.
    """
    print(textwrap.dedent("""\
    ##fileformat=VCFv4.1
    ##phasing=none
    ##INDIVIDUAL=TRUTH
    ##SAMPLE=<ID=TRUTH,Individual="TRUTH",Description="bamsurgeon spike-in">
    ##INFO=<ID=CIPOS,Number=2,Type=Integer,Description="Confidence interval around POS for imprecise variants">
    ##INFO=<ID=IMPRECISE,Number=0,Type=Flag,Description="Imprecise structural variation">
    ##INFO=<ID=SVTYPE,Number=1,Type=String,Description="Type of structural variant">
    ##INFO=<ID=SVLEN,Number=.,Type=Integer,Description="Difference in length between REF and ALT alleles">
    ##INFO=<ID=SOMATIC,Number=0,Type=Flag,Description="Somatic mutation in primary">
    ##INFO=<ID=VAF,Number=1,Type=Float,Description="Variant Allele Frequency">
    ##INFO=<ID=DPR,Number=1,Type=Float,Description="Avg Depth in Region (+/- 1bp)">
    ##INFO=<ID=MATEID,Number=1,Type=String,Description="Breakend mate">
    ##ALT=<ID=INV,Description="Inversion">
    ##ALT=<ID=DUP,Description="Duplication">
    ##ALT=<ID=DEL,Description="Deletion">
    ##ALT=<ID=INS,Description="Insertion">
    ##FORMAT=<ID=GT,Number=1,Type=String,Description="Genotype">
    #CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\tFORMAT\tSPIKEIN"""))
if len(sys.argv) == 2:
    # Emit the VCF header, then one PASS record per 'snv' line found in the
    # bamsurgeon .log files of the given directory.
    print_header()
    logdir_files = os.listdir(sys.argv[1])
    for filename in logdir_files:
        if filename.endswith('.log'):
            with open(sys.argv[1] + '/' + filename, 'r') as infile:
                for line in infile:
                    if line.startswith('snv'):
                        # Whitespace-split log fields: [1]=region ("chrom:span"),
                        # [3]=position, [4]=mutation ("REF-->ALT"),
                        # [6]=region depth, [7]=variant allele frequency.
                        c = line.strip().split()
                        chrom = c[1].split(':')[0]
                        pos = c[3]
                        mut = c[4]
                        dpr = c[6]
                        vaf = c[7]
                        ref, alt = mut.split('-->')
                        # Fix: print() with a single argument is valid in both
                        # Python 2 and 3; the original print statement was a
                        # SyntaxError under Python 3.
                        print("\t".join((chrom, pos, '.', ref, alt, '100', 'PASS',
                                         'SOMATIC;VAF=' + vaf + ';DPR=' + dpr,
                                         'GT', '0/1')))
else:
    # Single string keeps the same output as the original comma-form print.
    print("usage: " + sys.argv[0] + " <log directory>")
|
"""
Utility methods, for compatibility between Python version
:author: Thomas Calmant
:copyright: Copyright 2017, Thomas Calmant
:license: Apache License 2.0
:version: 0.3.1
..
Copyright 2017 Thomas Calmant
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import sys
# Module version, kept in sync with the package release.
__version_info__ = (0, 3, 1)
__version__ = ".".join(str(x) for x in __version_info__)

# Documentation format marker (reStructuredText), read by doc tools.
__docformat__ = "restructuredtext en"
# Define the string/numeric type tuples and the bytes<->str converters that
# match the running interpreter's major version, under identical names, so
# the rest of the package never branches on the Python version itself.
if sys.version_info[0] < 3:
    # Python 2
    # pylint: disable=E1101
    import types
    try:
        STRING_TYPES = (
            types.StringType,
            types.UnicodeType
        )
    except NameError:
        # Python built without unicode support
        STRING_TYPES = (types.StringType,)
    NUMERIC_TYPES = (
        types.IntType,
        types.LongType,
        types.FloatType
    )

    def to_bytes(string):
        """
        Converts the given string into bytes
        """
        # pylint: disable=E0602
        if type(string) is unicode:
            return str(string)
        return string

    def from_bytes(data):
        """
        Converts the given bytes into a string
        """
        if type(data) is str:
            return data
        return str(data)
else:
    # Python 3
    # pylint: disable=E1101
    STRING_TYPES = (
        bytes,
        str
    )
    NUMERIC_TYPES = (
        int,
        float
    )

    def to_bytes(string):
        """
        Converts the given string into bytes
        """
        if type(string) is bytes:
            return string
        return bytes(string, "UTF-8")

    def from_bytes(data):
        """
        Converts the given bytes into a string
        """
        if type(data) is str:
            return data
        return str(data, "UTF-8")
try:
import enum
def is_enum(obj):
"""
Checks if an object is from an enumeration class
:param obj: Object to test
:return: True if the object is an enumeration item
"""
return isinstance(obj, enum.Enum)
except ImportError:
# Pre-Python 3.4
def is_enum(_):
"""
Before Python 3.4, enumerations didn't exist.
:param _: Object to test
:return: Always False
"""
return False
DictType = dict
ListType = list
TupleType = tuple
ITERABLE_TYPES = (
list,
set, frozenset,
tuple
)
VALUE_TYPES = (
bool,
type(None)
)
PRIMITIVE_TYPES = STRING_TYPES + NUMERIC_TYPES + VALUE_TYPES
|
from OpenGL.GL import *
from OpenGL.GLU import *
from OpenGL.GLUT import *
import camera
import time
class Display(object):
    """Object-oriented wrapper around a GLUT window with an OpenGL context.

    Subclasses customize behaviour by overriding the no-op hook methods
    below (init, draw, mouse, keyboard, timerFired, ...); the *Wrapper
    methods are what is actually registered with GLUT and delegate to the
    hooks.
    """
    # Inheritance convenience functions -- override what you need.
    def init(self): pass
    def close(self): pass
    def mouse(self, mouseButton, buttonState, x, y): pass
    def mouseMotion(self, x, y, dx, dy): pass
    def passiveMouseMotion(self, x, y, dx, dy): pass
    def keyboard(self, key, x, y): pass
    def specialKeys(self, key, x, y): pass
    def timerFired(self, value): pass
    def draw(self): pass
    # Initialization function
    def __init__(self, width = 1280, height = 720, frameName = "OpenGL"):
        """Create the window, set up GL/GLUT and the camera.

        Args:
            width, height: initial window size in pixels.
            frameName: window title.
        """
        self.frameSize = (self.width, self.height) = (width, height)
        self.frameName = frameName
        self.timerDelay = 20  # milliseconds between timer callbacks
        self.clearColor = (135.0/255, 206.0/255, 250.0/255, 1)  # light sky blue
        self.defaultColor = (1, 1, 1)
        # Camera positioning
        self.pos = (0, 0, 0)
        self.ypr = (0, 0, 0)  # presumably yaw/pitch/roll -- TODO confirm
        self.init()  # subclass hook, runs before any GL setup
        # Set up graphics
        self.initGL()
        self.initGLUT()
        self.camera = camera.Camera(self.width, self.height)
        # For mouse motion deltas (None until the first event arrives)
        self._mouseX = None
        self._mouseY = None
    # One-time GL commands
    def initGL(self):
        glClearColor(*self.clearColor)
    # Initialize the window manager (GLUT) and register callbacks
    def initGLUT(self):
        glutInit()
        glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGB | GLUT_DEPTH)
        glutInitWindowSize(*self.frameSize)
        glutCreateWindow(self.frameName)
        # Register all the convenience functions
        glutDisplayFunc(self.drawWrapper)
        glutIdleFunc(self.drawWrapper)
        # BUGFIX: register the wrapper, not the bare hook. GLUT timers are
        # one-shot; only timerFiredWrapper re-arms the timer, so registering
        # self.timerFired made the timer fire exactly once.
        glutTimerFunc(self.timerDelay, self.timerFiredWrapper, 0)
        glutMouseFunc(self.mouse)
        glutMotionFunc(self.mouseMotionWrapper)
        glutPassiveMotionFunc(self.passiveMouseMotionWrapper)
        glutKeyboardFunc(self.keyboard)
        glutSpecialFunc(self.specialKeys)
        glutReshapeFunc(self.reshape)
        # Try to register a close function (fall back to a different one)
        try:
            glutCloseFunc(self.close)
        except Exception:
            # Some GLUT implementations expose glutWMCloseFunc instead.
            glutWMCloseFunc(self.close)
    # GL commands executed before drawing
    def preGL(self):
        glShadeModel(GL_FLAT)
        glEnable(GL_DEPTH_TEST)
        # Set up colors and clear buffers
        glClearColor(*self.clearColor)
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
        glColor3f(*self.defaultColor)
        glLoadIdentity()
    # Commands after GL is done
    def postGL(self):
        glutSwapBuffers()
        time.sleep(1/60.0)  # crude frame-rate cap (~60 fps)
    # Wrapper to invoke the hook and re-register the timer event
    def timerFiredWrapper(self, value):
        self.timerFired(value)
        # BUGFIX: re-register this wrapper (was self.timerFired, which never
        # re-armed the timer after the first tick).
        glutTimerFunc(self.timerDelay, self.timerFiredWrapper, value + 1)
    # Wrapper to handle as much GL as possible
    def drawWrapper(self):
        self.preGL()
        # Let the camera draw the view
        self.camera.draw(self.draw, self.pos, self.ypr)
        self.postGL()
    # Wrapper to pass change in position as well as position
    # Only called when mouse motion and button pressed
    def mouseMotionWrapper(self, x, y):
        if self._mouseX is None or self._mouseY is None:
            (self._mouseX, self._mouseY) = (x, y)
        (dx, dy) = (x - self._mouseX, y - self._mouseY)
        self.mouseMotion(x, y, dx, dy)
        (self._mouseX, self._mouseY) = (x, y)
    # Wrapper to pass change in position as well as position
    # Called when mouse motion and not button pressed
    def passiveMouseMotionWrapper(self, x, y):
        if self._mouseX is None or self._mouseY is None:
            (self._mouseX, self._mouseY) = (x, y)
        (dx, dy) = (x - self._mouseX, y - self._mouseY)
        self.passiveMouseMotion(x, y, dx, dy)
        (self._mouseX, self._mouseY) = (x, y)
    # Update when resizing the window
    def reshape(self, width, height):
        if self.width != width or self.height != height:
            glutReshapeWindow(width, height)
            self.camera.width = width
            self.camera.height = height
    # Run the GL main loop (blocks until the window closes)
    def run(self):
        glutMainLoop()
|
import copy
from collections import OrderedDict
from collections import defaultdict
from conans.model.env_info import EnvValues
from conans.model.options import OptionsValues
from conans.model.values import Values
class Profile(object):
    """A profile: settings (with values), per-package settings, environment
    variables, options and build requirements."""

    def __init__(self):
        # Sections
        self.settings = OrderedDict()
        self.package_settings = defaultdict(OrderedDict)
        self.env_values = EnvValues()
        self.options = OptionsValues()
        self.build_requires = OrderedDict()  # conan_ref pattern -> list of conan_ref

    @property
    def settings_values(self):
        """Settings as a conans ``Values`` object."""
        return Values.from_list(list(self.settings.items()))

    @property
    def package_settings_values(self):
        """Per-package settings as ``{package: [(name, value), ...]}``."""
        return {pkg: list(vals.items())
                for pkg, vals in self.package_settings.items()}

    def dumps(self):
        """Serialize the profile in its text file format."""
        lines = ["[settings]"]
        lines.extend("%s=%s" % (name, value)
                     for name, value in self.settings.items())
        for pkg, vals in self.package_settings.items():
            lines.extend("%s:%s=%s" % (pkg, name, value)
                         for name, value in vals.items())
        lines.append("[options]")
        lines.append(self.options.dumps())
        lines.append("[build_requires]")
        for pattern, refs in self.build_requires.items():
            lines.append("%s: %s" % (pattern, ", ".join(str(r) for r in refs)))
        lines.append("[env]")
        lines.append(self.env_values.dumps())
        return "\n".join(lines).replace("\n\n", "\n")

    def update(self, other):
        """Merge *other* into this profile; *other* takes priority."""
        self.update_settings(other.settings)
        self.update_package_settings(other.package_settings)
        # env values are merged with the opposite priority
        other.env_values.update(self.env_values)
        self.env_values = other.env_values
        self.options.update(other.options)
        for pattern, refs in other.build_requires.items():
            self.build_requires.setdefault(pattern, []).extend(refs)

    def update_settings(self, new_settings):
        """Mix the specified settings with the current profile.
        Specified settings are prioritized to profile."""
        assert isinstance(new_settings, OrderedDict)
        merged = copy.copy(self.settings)  # apply on top of the current profile
        if new_settings:
            # A changed parent setting invalidates its current subsettings.
            # Example: a different "compiler" drops every "compiler.XXX".
            for name, value in new_settings.items():
                if "." in name:
                    continue
                if name in self.settings and self.settings[name] != value:
                    prefix = "%s." % name
                    for existing in self.settings:
                        if existing.startswith(prefix):
                            del merged[existing]
            # Now merge the new values on top
            merged.update(new_settings)
        self.settings = merged

    def update_package_settings(self, package_settings):
        """Mix the specified package settings with the specified profile.
        Specified package settings are prioritized to profile."""
        for pkg, vals in package_settings.items():
            self.package_settings[pkg].update(vals)
|
__author__ = 'sei'
# Default serial-port parameters -- presumably for the device this module
# drives; verify against the hardware documentation.
DEFAULT_SERIAL = '/dev/ttyUSB0'
DEFAULT_BAUDRATE = 57600
|
# Regexes matching image drop paths; groups capture: (1) the crop folder,
# (2) the 9-digit id plus suffix, (3) the suffix alone, (4) the extension.
# The *10 duplication inflates the lists -- presumably for benchmarking the
# matcher below; confirm before relying on it.
patterns = [r'^.*?/bc_jpg_makerDrop/(crop_fullsize_pad_center)/?.*?/(\d{9}(.*?))\.(.*?)$',
        r'^.*?/bc_jpg_makerDrop/(crop_fullsize_pad_anchor)/?.*?/(\d{9}(.*?))\.(.*?)$',
        r'^.*?/bfly_jpg_makerDrop/(crop_fullsize_center)/?.*?/(\d{9}(.*?))\.(.*?)$',
        r'^.*?/bfly_jpg_makerDrop/(crop_fullsize_anchor)/?.*?/(\d{9}(.*?))\.(.*?)$']*10
# Sample input paths exercising each pattern.
strings = ["/mnt/Post_Complete/Complete_to_Load/nature_center/bc_jpg_makerDrop/crop_fullsize_pad_anchor/346470409.png",
"/mnt/Post_Complete/Complete_to_Load/nature_center/bc_jpg_makerDrop/crop_fullsize_pad_center/346470408_1.jpg",
"/mnt/Post_Complete/Complete_to_Load/nature_center/bc_jpg_makerDrop/crop_fullsize_pad_anchor/346470407_alt01.png",
"/mnt/Post_Complete/Complete_to_Load/nature_center/bc_jpg_makerDrop/crop_fullsize_pad_center/346470406_1.png",
"/mnt/Post_Complete/Complete_to_Load/nature_center/bfly_jpg_makerDrop/crop_fullsize_anchor/346880405.png",
"/mnt/Post_Complete/Complete_to_Load/nature_center/bfly_jpg_makerDrop/crop_fullsize_center/346470404_1.jpg",
"/mnt/Post_Complete/Complete_to_Load/nature_center/bfly_jpg_makerDrop/crop_fullsize_center/346470403.png",
"/mnt/Post_Complete/Complete_to_Load/nature_center/bfly_jpg_makerDrop/crop_fullsize_anchor/336470402.jpg"]*10
def matches_pattern(str, patterns):
    """Return ``(match_object, pattern)`` for the first compiled pattern in
    *patterns* that matches *str*, or ``False`` if none match.

    NOTE(review): the parameter name shadows the builtin ``str``; kept
    unchanged so keyword callers are not broken.
    """
    for pattern in patterns:
        # Match once and reuse the result (the original matched twice).
        m = pattern.match(str)
        if m:
            return m, pattern
    return False
def regex_matcherator(strings,patterns):
    """Print, for each string that matches one of *patterns*: the matching
    pattern, the last two path components, and the captured groups.

    NOTE: Python 2 (print statements). Also note matches_pattern is called
    three times per string -- harmless but redundant.
    """
    import re
    compiled_patterns = list(map(re.compile, patterns))
    for s in strings:
        if matches_pattern(s, compiled_patterns):
            print matches_pattern(s, compiled_patterns)[1].pattern
            print '--'.join(s.split('/')[-2:])
            print matches_pattern(s, compiled_patterns)[0].groups()
            print '\n'
# Run the demo over the sample data above (returns None; prints results).
r = regex_matcherator(strings,patterns)
|
from swgpy.object import *
def create(kernel):
    """Build the Tangible template for the webber trap object.

    Args:
        kernel: SWGPy kernel (unused here, part of the factory signature).
    Returns:
        A configured ``Tangible`` instance.
    """
    result = Tangible()
    result.template = "object/tangible/scout/trap/shared_trap_webber.iff"
    result.attribute_template_id = -1
    result.stfName("item_n","trap_webber")
    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####
    return result
|
from swgpy.object import *
def create(kernel):
    """Build the Tangible template for the Ithorian shirt (style s09).

    Args:
        kernel: SWGPy kernel (unused here, part of the factory signature).
    Returns:
        A configured ``Tangible`` instance.
    """
    result = Tangible()
    result.template = "object/tangible/wearables/ithorian/shared_ith_shirt_s09.iff"
    result.attribute_template_id = 11
    result.stfName("wearables_name","ith_shirt_s09")
    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####
    return result
|
"""Extension to execute code outside the Python shell window.
This adds the following commands:
- Check module does a full syntax check of the current module.
It also runs the tabnanny to catch any inconsistent tabs.
- Run module executes the module's code in the __main__ namespace. The window
must have been saved previously. The module is added to sys.modules, and is
also added to the __main__ namespace.
XXX GvR Redesign this interface (yet again) as follows:
- Present a dialog box for ``Run Module''
- Allow specify command line arguments in the dialog box
"""
import os
import re
import string
import tabnanny
import tokenize
import tkMessageBox
from idlelib import PyShell
from idlelib.configHandler import idleConf
# Characters that may appear in a Python identifier (used when colorizing
# syntax errors to extend the ERROR tag over the whole word).
IDENTCHARS = string.ascii_letters + string.digits + "_"
# User-facing message shown when tabnanny reports inconsistent indentation.
indent_message = """Error: Inconsistent indentation detected!
1) Your indentation is outright incorrect (easy to fix), OR
2) Your indentation mixes tabs and spaces.
To fix case 2, change all tabs to spaces by using Edit->Select All followed \
by Format->Untabify Region and specify the number of columns used by each tab.
"""
class ScriptBinding:
    """IDLE extension providing the 'Check Module' and 'Run Module' commands.

    Check Module runs a full syntax check plus tabnanny; Run Module compiles
    the saved file and executes it in the shell's __main__ namespace.
    NOTE: Python 2 code (old ``except E, v`` syntax, tkMessageBox).
    """
    menudefs = [
        ('run', [None,
                 ('Check Module', '<<check-module>>'),
                 ('Run Module', '<<run-module>>'), ]), ]
    def __init__(self, editwin):
        # editwin is the EditorWindow this binding is attached to.
        self.editwin = editwin
        # Provide instance variables referenced by Debugger
        # XXX This should be done differently
        self.flist = self.editwin.flist
        self.root = self.editwin.root
    def check_module_event(self, event):
        """Menu handler: syntax-check then tabnanny-check the current file."""
        filename = self.getfilename()
        if not filename:
            return 'break'
        if not self.checksyntax(filename):
            return 'break'
        if not self.tabnanny(filename):
            return 'break'
    def tabnanny(self, filename):
        """Run tabnanny over *filename*; report and return False on problems.

        Returns True when indentation is consistent.
        """
        # NOTE(review): the file object is never closed explicitly; it is
        # reclaimed by refcounting on CPython.
        f = open(filename, 'r')
        try:
            tabnanny.process_tokens(tokenize.generate_tokens(f.readline))
        except tokenize.TokenError, msg:
            msgtxt, (lineno, start) = msg
            self.editwin.gotoline(lineno)
            self.errorbox("Tabnanny Tokenizing Error",
                          "Token Error: %s" % msgtxt)
            return False
        except tabnanny.NannyNag, nag:
            # The error messages from tabnanny are too confusing...
            self.editwin.gotoline(nag.get_lineno())
            self.errorbox("Tab/space error", indent_message)
            return False
        return True
    def checksyntax(self, filename):
        """Compile *filename*; return the code object, or False on error.

        On a syntax error the offending position is highlighted in the
        editor and an error dialog is shown.
        """
        self.shell = shell = self.flist.open_shell()
        saved_stream = shell.get_warning_stream()
        shell.set_warning_stream(shell.stderr)
        f = open(filename, 'r')
        source = f.read()
        f.close()
        # Normalize line endings: compile() wants '\n' and a trailing newline.
        if '\r' in source:
            source = re.sub(r"\r\n", "\n", source)
            source = re.sub(r"\r", "\n", source)
        if source and source[-1] != '\n':
            source = source + '\n'
        text = self.editwin.text
        text.tag_remove("ERROR", "1.0", "end")
        try:
            try:
                # If successful, return the compiled code
                return compile(source, filename, "exec")
            except (SyntaxError, OverflowError), err:
                try:
                    msg, (errorfilename, lineno, offset, line) = err
                    if not errorfilename:
                        # compile() may omit the filename; restore it so the
                        # traceback and highlighting point somewhere useful.
                        err.args = msg, (filename, lineno, offset, line)
                        err.filename = filename
                    self.colorize_syntax_error(msg, lineno, offset)
                except:
                    # Error metadata did not unpack as expected; fall back to
                    # a plain string representation.
                    msg = "*** " + str(err)
                self.errorbox("Syntax error",
                              "There's an error in your program:\n" + msg)
                return False
        finally:
            shell.set_warning_stream(saved_stream)
    def colorize_syntax_error(self, msg, lineno, offset):
        """Tag the error position with the ERROR style and move the cursor."""
        text = self.editwin.text
        pos = "0.0 + %d lines + %d chars" % (lineno-1, offset-1)
        text.tag_add("ERROR", pos)
        char = text.get(pos)
        if char and char in IDENTCHARS:
            # Extend the tag back over the whole identifier.
            text.tag_add("ERROR", pos + " wordstart", pos)
        if '\n' == text.get(pos):   # error at line end
            text.mark_set("insert", pos)
        else:
            text.mark_set("insert", pos + "+1c")
        text.see(pos)
    def run_module_event(self, event):
        """Run the module after setting up the environment.

        First check the syntax.  If OK, make sure the shell is active and
        then transfer the arguments, set the run environment's working
        directory to the directory of the module being executed and also
        add that directory to its sys.path if not already included.
        """
        filename = self.getfilename()
        if not filename:
            return 'break'
        code = self.checksyntax(filename)
        if not code:
            return 'break'
        if not self.tabnanny(filename):
            return 'break'
        shell = self.shell
        interp = shell.interp
        if PyShell.use_subprocess:
            shell.restart_shell()
        dirname = os.path.dirname(filename)
        # XXX Too often this discards arguments the user just set...
        interp.runcommand("""if 1:
            _filename = %r
            import sys as _sys
            from os.path import basename as _basename
            if (not _sys.argv or
                _basename(_sys.argv[0]) != _basename(_filename)):
                _sys.argv = [_filename]
            import os as _os
            _os.chdir(%r)
            del _filename, _sys, _basename, _os
            \n""" % (filename, dirname))
        interp.prepend_syspath(filename)
        # XXX KBK 03Jul04 When run w/o subprocess, runtime warnings still
        # go to __stderr__.  With subprocess, they go to the shell.
        # Need to change streams in PyShell.ModifiedInterpreter.
        interp.runcode(code)
        return 'break'
    def getfilename(self):
        """Get source filename.  If not saved, offer to save (or create) file

        The debugger requires a source file.  Make sure there is one, and that
        the current version of the source buffer has been saved.  If the user
        declines to save or cancels the Save As dialog, return None.
        If the user has configured IDLE for Autosave, the file will be
        silently saved if it already exists and is dirty.
        """
        filename = self.editwin.io.filename
        if not self.editwin.get_saved():
            autosave = idleConf.GetOption('main', 'General',
                                          'autosave', type='bool')
            if autosave and filename:
                self.editwin.io.save(None)
            else:
                reply = self.ask_save_dialog()
                self.editwin.text.focus_set()
                if reply == "ok":
                    self.editwin.io.save(None)
                    filename = self.editwin.io.filename
                else:
                    filename = None
        return filename
    def ask_save_dialog(self):
        """Show an OK/Cancel 'save before run' dialog; return the reply."""
        msg = "Source Must Be Saved\n" + 5*' ' + "OK to Save?"
        mb = tkMessageBox.Message(title="Save Before Run or Check",
                                  message=msg,
                                  icon=tkMessageBox.QUESTION,
                                  type=tkMessageBox.OKCANCEL,
                                  default=tkMessageBox.OK,
                                  master=self.editwin.text)
        return mb.show()
    def errorbox(self, title, message):
        # XXX This should really be a function of EditorWindow...
        tkMessageBox.showerror(title, message, master=self.editwin.text)
        self.editwin.text.focus_set()
|
from fabric.api import task, run, local, cd, hosts, env
import time
from oozappa.config import get_config, procure_common_functions
# Load oozappa settings (staging.vars + common.vars merged).
_settings = get_config()
# presumably makes the shared fabric helpers importable below -- verify
procure_common_functions()
import sys
from common_multiple_fabric_environment import _deploy_template_sample_a
test_host = ('192.168.0.110',) #FIXME
@task
def ls():
    u'''run ls command on local machine.'''
    local('ls -la')
@task
def ps():
    u'''run ps command on local machine.'''
    local('ps ax')
@task
def sys_path():
    u'''print the python module search path.'''
    import sys
    print(sys.path)
@task
def sleep():
    u'''sleep 5 seconds.'''
    print('stop 5 sec...')
    time.sleep(5)
    print('5 sec... passed')
@task
def printsetting():
    u'''print setting from staging.vars and common.vars'''
    print(_settings)
@task
@hosts(test_host)
def deploy_template_sample_a():
    u'''deploy template sample a to the test host.'''
    _deploy_template_sample_a(_settings.sample_template_vars.sample_a)
@task
def launch_instance_from_app_a_image():
    u'''eg. launch instance from app a image.'''
    print('launch_instance_from_app_a_image')
@task
def set_env_latest_app_a():
    u'''eg. search latest app type a instance and set fabric env.'''
    print('set_env_latest_app_a')
@task
def set_env_latest_app_b():
    u'''eg. search latest app type b instance and set fabric env.'''
    print('set_env_latest_app_b')
@task
def launch_instance_from_app_b_image():
    u'''eg. launch instance from app b image.'''
    print('launch_instance_from_app_b_image')
@task
def production_specific_setting():
    u'''eg. production specific setting'''
    print('production_specific_setting')
|
# Symbolic key names for keyboard events; the values are the identifier
# strings used by the event API.
# -- navigation / editing keys --
KEY_UP = "up"
KEY_DOWN = "down"
KEY_RIGHT = "right"
KEY_LEFT = "left"
KEY_INSERT = "insert"
KEY_HOME = "home"
KEY_END = "end"
KEY_PAGEUP = "pageup"
KEY_PAGEDOWN = "pagedown"
KEY_BACKSPACE = "backspace"
KEY_DELETE = "delete"
KEY_TAB = "tab"
KEY_ENTER = "enter"
KEY_PAUSE = "pause"
KEY_ESCAPE = "escape"
KEY_SPACE = "space"
# -- numeric keypad --
KEY_KEYPAD0 = "keypad0"
KEY_KEYPAD1 = "keypad1"
KEY_KEYPAD2 = "keypad2"
KEY_KEYPAD3 = "keypad3"
KEY_KEYPAD4 = "keypad4"
KEY_KEYPAD5 = "keypad5"
KEY_KEYPAD6 = "keypad6"
KEY_KEYPAD7 = "keypad7"
KEY_KEYPAD8 = "keypad8"
KEY_KEYPAD9 = "keypad9"
KEY_KEYPAD_PERIOD = "keypad_period"
KEY_KEYPAD_DIVIDE = "keypad_divide"
KEY_KEYPAD_MULTIPLY = "keypad_multiply"
KEY_KEYPAD_MINUS = "keypad_minus"
KEY_KEYPAD_PLUS = "keypad_plus"
KEY_KEYPAD_ENTER = "keypad_enter"
KEY_CLEAR = "clear"
# -- function keys --
KEY_F1 = "f1"
KEY_F2 = "f2"
KEY_F3 = "f3"
KEY_F4 = "f4"
KEY_F5 = "f5"
KEY_F6 = "f6"
KEY_F7 = "f7"
KEY_F8 = "f8"
KEY_F9 = "f9"
KEY_F10 = "f10"
KEY_F11 = "f11"
KEY_F12 = "f12"
KEY_F13 = "f13"
KEY_F14 = "f14"
KEY_F15 = "f15"
KEY_F16 = "f16"
KEY_F17 = "f17"
KEY_F18 = "f18"
KEY_F19 = "f19"
KEY_F20 = "f20"
# -- system / browser keys --
KEY_SYSREQ = "sysreq"
KEY_BREAK = "break"
KEY_CONTEXT_MENU = "context_menu"
KEY_BROWSER_BACK = "browser_back"
KEY_BROWSER_FORWARD = "browser_forward"
KEY_BROWSER_REFRESH = "browser_refresh"
KEY_BROWSER_STOP = "browser_stop"
KEY_BROWSER_SEARCH = "browser_search"
KEY_BROWSER_FAVORITES = "browser_favorites"
KEY_BROWSER_HOME = "browser_home"
|
import frappe
import json
from frappe.model.document import Document
from frappe.utils import get_fullname, parse_addr
# Hide ToDo from the "linked with" view -- it is an assignment artifact.
exclude_from_linked_with = True
class ToDo(Document):
    """Assignment record: tracks who assigned what to whom and mirrors the
    open assignments into the referenced document's ``_assign`` field."""
    DocType = 'ToDo'
    def validate(self):
        """Prepare the assignment/removal comment to be posted on update."""
        self._assignment = None
        if self.is_new():
            if self.assigned_by == self.allocated_to:
                assignment_message = frappe._("{0} self assigned this task: {1}").format(get_fullname(self.assigned_by), self.description)
            else:
                assignment_message = frappe._("{0} assigned {1}: {2}").format(get_fullname(self.assigned_by), get_fullname(self.allocated_to), self.description)
            self._assignment = {
                "text": assignment_message,
                "comment_type": "Assigned"
            }
        else:
            # NOTE the previous value is only available in validate method
            if self.get_db_value("status") != self.status:
                if self.allocated_to == frappe.session.user:
                    removal_message = frappe._("{0} removed their assignment.").format(
                        get_fullname(frappe.session.user))
                else:
                    removal_message = frappe._("Assignment of {0} removed by {1}").format(
                        get_fullname(self.allocated_to), get_fullname(frappe.session.user))
                self._assignment = {
                    "text": removal_message,
                    "comment_type": "Assignment Completed"
                }
    def on_update(self):
        # Post the comment prepared in validate(), then sync _assign.
        if self._assignment:
            self.add_assign_comment(**self._assignment)
        self.update_in_reference()
    def on_trash(self):
        # Drop communication links and refresh the reference's _assign list.
        self.delete_communication_links()
        self.update_in_reference()
    def add_assign_comment(self, text, comment_type):
        """Add *text* as a comment on the referenced document, if any."""
        if not (self.reference_type and self.reference_name):
            return
        frappe.get_doc(self.reference_type, self.reference_name).add_comment(comment_type, text)
    def delete_communication_links(self):
        # unlink todo from linked comments
        return frappe.db.delete("Communication Link", {
            "link_doctype": self.doctype,
            "link_name": self.name
        })
    def update_in_reference(self):
        """Rewrite the referenced document's ``_assign`` JSON list from all
        non-cancelled ToDos pointing at it."""
        if not (self.reference_type and self.reference_name):
            return
        try:
            assignments = frappe.get_all("ToDo", filters={
                "reference_type": self.reference_type,
                "reference_name": self.reference_name,
                "status": ("!=", "Cancelled")
            }, pluck="allocated_to")
            assignments.reverse()
            frappe.db.set_value(self.reference_type, self.reference_name,
                "_assign", json.dumps(assignments), update_modified=False)
        except Exception as e:
            if frappe.db.is_table_missing(e) and frappe.flags.in_install:
                # no table
                return
            elif frappe.db.is_column_missing(e):
                # add the missing _assign column, then retry once
                from frappe.database.schema import add_column
                add_column(self.reference_type, "_assign", "Text")
                self.update_in_reference()
            else:
                raise
    @classmethod
    def get_owners(cls, filters=None):
        """Returns list of owners after applying filters on todo's.
        """
        rows = frappe.get_all(cls.DocType, filters=filters or {}, fields=['allocated_to'])
        return [parse_addr(row.allocated_to)[1] for row in rows if row.allocated_to]
def on_doctype_update():
    """Add a composite DB index for the reference lookups used above."""
    frappe.db.add_index("ToDo", ["reference_type", "reference_name"])
def get_permission_query_conditions(user):
    """Return an SQL condition limiting ToDo visibility for *user*, or None
    when the user holds a role that grants access to all ToDos."""
    user = user or frappe.session.user
    todo_roles = frappe.permissions.get_doctype_roles('ToDo')
    if 'All' in todo_roles:
        todo_roles.remove('All')
    user_roles = frappe.get_roles(user)
    if any(role in todo_roles for role in user_roles):
        return None
    # Restrict to ToDos the user allocated or was assigned.
    escaped = frappe.db.escape(user)
    return ("(`tabToDo`.allocated_to = {user} "
            "or `tabToDo`.assigned_by = {user})".format(user=escaped))
def has_permission(doc, ptype="read", user=None):
    """Return True if *user* may perform *ptype* on the given ToDo *doc*."""
    user = user or frappe.session.user
    roles = frappe.permissions.get_doctype_roles('ToDo', ptype)
    if 'All' in roles:
        roles.remove('All')
    # Role-based access first; otherwise only the parties to the assignment.
    if any(role in roles for role in frappe.get_roles(user)):
        return True
    return doc.allocated_to == user or doc.assigned_by == user
@frappe.whitelist()
def new_todo(description):
    """Whitelisted endpoint: create a ToDo with the given description."""
    frappe.get_doc({
        'doctype': 'ToDo',
        'description': description
    }).insert()
|
'''
Created on Mar 4, 2017
@author: preiniger
'''
def __validate_alliance(alliance_color, teams, official_sr):
    """Cross-check one alliance's scouted score results against the official
    match record.

    Args:
        alliance_color: "Red" or "Blue", used to label messages.
        teams: the alliance's team model objects (normally three).
        official_sr: the official score result for this alliance.
    Returns:
        (warning_messages, error_messages): lists of
        ``(description, expected, actual)`` tuples.
    """
    official_match_number = official_sr.official_match.matchNumber

    def _find_sr(team):
        # First scouted result for this team belonging to the official match.
        # TODO: there has to be a better way... but I'd rather not touch the DB
        for sr in team.scoreresult_set.all():
            if sr.match.matchNumber == official_match_number:
                return sr
        return None

    # Replaces three copy-pasted loops; also works for any alliance size.
    team_srs = [sr for sr in (_find_sr(team) for team in teams)
                if sr is not None]
    warning_messages = []
    error_messages = []
    for team in teams:
        if team != official_sr.team1 and team != official_sr.team2 and team != official_sr.team3:
            error_messages.append((alliance_color + " team mismatch", teams, team.teamNumber))
    if len(team_srs) != 3:
        error_messages.append((alliance_color + " wrong number of teams", 3, len(team_srs)))
    # Recompute the alliance score from the scouted tube counts:
    # high = 3 pts, mid = 2 pts, low = 1 pt.
    tele_high_tubes = sum(sr.high_tubes_hung for sr in team_srs)
    tele_mid_tubes = sum(sr.mid_tubes_hung for sr in team_srs)
    tele_low_tubes = sum(sr.low_tubes_hung for sr in team_srs)
    total_score = tele_high_tubes * 3 + tele_mid_tubes * 2 + tele_low_tubes
    if total_score != official_sr.total_score:
        warning_messages.append((alliance_color + " total score", official_sr.total_score, total_score))
    return warning_messages, error_messages
def validate_match(match, official_match, official_srs):
    """Validate both alliances of *match* against the official score results.

    Returns:
        (error_level, warning_messages, error_messages) where error_level is
        0 (clean), 1 (warnings only) or 2 (errors present).
    """
    red_warn, red_err = __validate_alliance(
        "Red", [match.red1, match.red2, match.red3], official_srs[0])
    blue_warn, blue_err = __validate_alliance(
        "Blue", [match.blue1, match.blue2, match.blue3], official_srs[1])
    warning_messages = red_warn + blue_warn
    error_messages = red_err + blue_err
    if error_messages:
        error_level = 2
    elif warning_messages:
        error_level = 1
    else:
        error_level = 0
    return error_level, warning_messages, error_messages
|
import json
import requests
import key
# Riot API key, loaded from the local ``key`` module (kept out of the repo).
API_key = key.getAPIkey()
def load_champion_pictures(champion_json):
print len(champion_json['data'])
version = champion_json['version']
print "version: " + version
for champion in champion_json['data']:
print champion
r = requests.get('http://ddragon.leagueoflegends.com/cdn/' + version + '/img/champion/' + champion + '.png')
if r.status_code == 200:
img = r.content
with open('static/images/champions/' + champion_json['data'][champion]['name'] + '.png', 'w') as f:
f.write(img)
print "img created"
else:
print "pictures: something went wrong"
def load_champion_json():
    """Fetch Riot's static champion data, download portraits, and cache the
    JSON (with MonkeyKing renamed to Wukong) to static/json/champion.json.

    NOTE: Python 2 code; errors are printed and swallowed.
    """
    try:
        r = requests.get('https://global.api.pvp.net/api/lol/static-data/na/v1.2/champion?&api_key=' + API_key)
        champion_json = r.json()
        # A 'status' key means the API returned an error envelope.
        if 'status' in champion_json:
            print champion_json['status']['message']
            return
        load_champion_pictures(champion_json)
        # quick fix to change MonkeyKing to Wukong so that sort_keys sorts it properly
        champion_json['data']['Wukong'] = champion_json['data']['MonkeyKing']
        del champion_json['data']['MonkeyKing']
    except ValueError as e:
        # r.json() raises ValueError on non-JSON responses.
        print e.message
        return
    with open('static/json/champion.json', 'w') as f:
        json.dump(champion_json, f, sort_keys=True)
# Run the refresh when the module is executed/imported.
load_champion_json()
|
from util import nodeenv_delegate
from setup import setup
if __name__ == "__main__":
    # Ensure the node environment exists (dependencies assumed installed),
    # then hand control to ``npx`` inside that environment.
    setup(skip_dependencies=True)
    nodeenv_delegate("npx")
|
from math import floor
from typing import (
Tuple,
Any
)
from PyQt5.QtCore import (
QPointF,
QRectF,
Qt
)
from PyQt5.QtGui import (
QBrush,
QPen,
QPainterPath,
QPolygonF,
QMouseEvent,
QPainter
)
from PyQt5.QtWidgets import (
qApp,
QGraphicsItem,
QGraphicsPathItem,
QGraphicsRectItem,
QGraphicsEllipseItem,
QStyleOptionGraphicsItem,
QWidget,
QGraphicsSceneMouseEvent,
QGraphicsSceneHoverEvent
)
from cadnano.gui.palette import getColorObj
from cadnano.views.pathview import pathstyles as styles
from cadnano.views.pathview.tools.pathselection import SelectionItemGroup
from cadnano.views.pathview import (
PathVirtualHelixItemT,
PathXoverItemT,
PathStrandItemT,
PathNucleicAcidPartItemT
)
from cadnano.cntypes import (
StrandT,
DocT,
Vec2T,
WindowT
)
# Width in pixels of one base in the path view.
_BASE_WIDTH = styles.PATH_BASE_WIDTH
# Pre-built glyph paths for strand endpoints, selected by which end(s) the
# item caps and the strand's drawing direction (5' = square, 3' = arrow).
PP_L5 = QPainterPath()  # Left 5' PainterPath
PP_R5 = QPainterPath()  # Right 5' PainterPath
PP_L3 = QPainterPath()  # Left 3' PainterPath
PP_R3 = QPainterPath()  # Right 3' PainterPath
PP_53 = QPainterPath()  # Left 5', Right 3' PainterPath
PP_35 = QPainterPath()  # Left 3', Right 5' PainterPath
# 5' caps: squares inset from the base cell.
PP_L5.addRect(0.25 * _BASE_WIDTH,
              0.125 * _BASE_WIDTH,
              0.75 * _BASE_WIDTH,
              0.75 * _BASE_WIDTH)
PP_R5.addRect(0, 0.125 * _BASE_WIDTH, 0.75 * _BASE_WIDTH, 0.75 * _BASE_WIDTH)
# 3' caps: triangles pointing in the strand direction.
L3_POLY = QPolygonF()
L3_POLY.append(QPointF(_BASE_WIDTH, 0))
L3_POLY.append(QPointF(0.25 * _BASE_WIDTH, 0.5 * _BASE_WIDTH))
L3_POLY.append(QPointF(_BASE_WIDTH, _BASE_WIDTH))
L3_POLY.append(QPointF(_BASE_WIDTH, 0))
PP_L3.addPolygon(L3_POLY)
R3_POLY = QPolygonF()
R3_POLY.append(QPointF(0, 0))
R3_POLY.append(QPointF(0.75 * _BASE_WIDTH, 0.5 * _BASE_WIDTH))
R3_POLY.append(QPointF(0, _BASE_WIDTH))
R3_POLY.append(QPointF(0, 0))
PP_R3.addPolygon(R3_POLY)
# Dual caps: half-cell square plus a triangle on the 3' side.
PP_53.addRect(0, 0.125 * _BASE_WIDTH, 0.5 * _BASE_WIDTH, 0.75 * _BASE_WIDTH)
POLY_53 = QPolygonF()
POLY_53.append(QPointF(0.5 * _BASE_WIDTH, 0))
POLY_53.append(QPointF(_BASE_WIDTH, 0.5 * _BASE_WIDTH))
POLY_53.append(QPointF(0.5 * _BASE_WIDTH, _BASE_WIDTH))
PP_53.addPolygon(POLY_53)
PP_35.addRect(0.50 * _BASE_WIDTH,
              0.125 * _BASE_WIDTH,
              0.5 * _BASE_WIDTH,
              0.75 * _BASE_WIDTH)
POLY_35 = QPolygonF()
POLY_35.append(QPointF(0.5 * _BASE_WIDTH, 0))
POLY_35.append(QPointF(0, 0.5 * _BASE_WIDTH))
POLY_35.append(QPointF(0.5 * _BASE_WIDTH, _BASE_WIDTH))
PP_35.addPolygon(POLY_35)
# One-base bounding rect used for the click area.
_DEFAULT_RECT = QRectF(0, 0, _BASE_WIDTH, _BASE_WIDTH)
_NO_PEN = QPen(Qt.NoPen)
# Small circle drawn above the endpoint when a modification is attached.
MOD_RECT = QRectF(.25*_BASE_WIDTH, -.25*_BASE_WIDTH, 0.5*_BASE_WIDTH, 0.5*_BASE_WIDTH)
class EndpointItem(QGraphicsPathItem):
FILTER_NAME = "endpoint"
    def __init__(self, strand_item: PathStrandItemT,
                 cap_type: str,  # low, high, dual
                 is_drawn5to3: bool):
        """The parent should be a StrandItem.

        Args:
            strand_item: owning strand item; its virtual helix item becomes
                the QGraphicsItem parent.
            cap_type: 'low', 'high' or 'dual' -- which strand end(s) this caps.
            is_drawn5to3: drawing direction of the strand.
        """
        super(EndpointItem, self).__init__(strand_item.virtualHelixItem())
        self._strand_item = strand_item
        self._getActiveTool = strand_item._getActiveTool
        self.cap_type = cap_type
        # Drag bounds are set elsewhere before dragging -- None until then.
        self._low_drag_bound = None
        self._high_drag_bound = None
        self._mod_item = None
        self._isdrawn5to3 = is_drawn5to3
        self._initCapSpecificState(is_drawn5to3)
        p = QPen()
        p.setCosmetic(True)  # constant 1px outline regardless of zoom
        self.setPen(p)
        # for easier mouseclick: an invisible full-base hit rectangle whose
        # hover/mouse handlers are redirected to this item's methods.
        self._click_area = cA = QGraphicsRectItem(_DEFAULT_RECT, self)
        self._click_area.setAcceptHoverEvents(True)
        cA.hoverMoveEvent = self.hoverMoveEvent
        cA.mousePressEvent = self.mousePressEvent
        cA.mouseMoveEvent = self.mouseMoveEvent
        cA.setPen(_NO_PEN)
        self.setFlag(QGraphicsItem.ItemIsSelectable)
    # end def
### SIGNALS ###
### SLOTS ###
### ACCESSORS ###
    def idx(self) -> int:
        """Look up ``base_idx``, as determined by :class:`StrandItem` idxs and
        cap type."""
        # A low cap sits at the strand's low index; high/dual at the high one.
        if self.cap_type == 'low':
            return self._strand_item.idxs()[0]
        else:  # high or dual, doesn't matter
            return self._strand_item.idxs()[1]
    # end def
    def partItem(self) -> PathNucleicAcidPartItemT:
        """Return the part item, via the owning strand item."""
        return self._strand_item.partItem()
    # end def
    def disableEvents(self):
        """Stop hover events and revert mouse handlers to the base class."""
        self._click_area.setAcceptHoverEvents(False)
        self.mouseMoveEvent = QGraphicsPathItem.mouseMoveEvent
        self.mousePressEvent = QGraphicsPathItem.mousePressEvent
    # end def
    def window(self) -> WindowT:
        """Return the document window, via the owning strand item."""
        return self._strand_item.window()
### PUBLIC METHODS FOR DRAWING / LAYOUT ###
def updatePosIfNecessary(self, idx: int) -> Tuple[bool, SelectionItemGroup]:
"""Update position if necessary and return ``True`` if updated."""
group = self.group()
self.tempReparent()
x = int(idx * _BASE_WIDTH)
if x != self.x():
self.setPos(x, self.y())
# if group:
# group.addToGroup(self)
return True, group
else:
# if group:
# group.addToGroup(self)
return False, group
    def safeSetPos(self, x: float, y: float):
        """
        Required to ensure proper reparenting if selected
        """
        group = self.group()
        self.tempReparent()
        self.setPos(x, y)
        if group:
            group.addToGroup(self)
    # end def
    def resetEndPoint(self, is_drawn5to3: bool):
        """Reparent to the virtual helix item and redraw for the direction."""
        self.setParentItem(self._strand_item.virtualHelixItem())
        self._initCapSpecificState(is_drawn5to3)
        # 5'->3' strands sit on the top row, the reverse on the bottom row.
        upperLeftY = 0 if is_drawn5to3 else _BASE_WIDTH
        self.setY(upperLeftY)
    # end def
    def showMod(self, mod_id: str, color: str):
        """Create and show the modification marker for *mod_id*."""
        self._mod_item = QGraphicsEllipseItem(MOD_RECT, self)
        self.changeMod(mod_id, color)
        self._mod_item.show()
        # print("Showing {}".format(mod_id))
    # end def
    def changeMod(self, mod_id: str, color: str):
        """Update the stored mod id and recolor the existing marker."""
        self._mod_id = mod_id
        self._mod_item.setBrush(QBrush(getColorObj(color)))
    # end def
    def destroyMod(self):
        """Remove the modification marker from the scene and forget it."""
        self.scene().removeItem(self._mod_item)
        self._mod_item = None
        self._mod_id = None
    # end def
    def destroyItem(self):
        '''Remove this object and references to it from the view
        '''
        scene = self.scene()
        if self._mod_item is not None:
            self.destroyMod()
        scene.removeItem(self._click_area)
        self._click_area = None
        scene.removeItem(self)
    # end def
### PRIVATE SUPPORT METHODS ###
    def _initCapSpecificState(self, is_drawn5to3: bool):
        """Select and apply the painter path for this cap type/direction."""
        c_t = self.cap_type
        if c_t == 'low':
            path = PP_L5 if is_drawn5to3 else PP_L3
        elif c_t == 'high':
            path = PP_R3 if is_drawn5to3 else PP_R5
        elif c_t == 'dual':
            path = PP_53 if is_drawn5to3 else PP_35
        # NOTE(review): any other cap_type leaves ``path`` unbound and the
        # next line raises UnboundLocalError -- confirm inputs are restricted
        # to 'low'/'high'/'dual'.
        self.setPath(path)
    # end def
### EVENT HANDLERS ###
def mousePressEvent(self, event: QGraphicsSceneMouseEvent):
"""Parses a :meth:`mousePressEvent`, calling the appropriate tool
method as necessary. Stores ``_move_idx`` for future comparison.
"""
self.scene().views()[0].addToPressList(self)
idx = self._strand_item.setActiveEndpoint(self.cap_type)
self._move_idx = idx
active_tool_str = self._getActiveTool().methodPrefix()
tool_method_name = active_tool_str + "MousePress"
if hasattr(self, tool_method_name): # if the tool method exists
modifiers = event.modifiers()
getattr(self, tool_method_name)(modifiers, event, self.idx())
def hoverLeaveEvent(self, event: QGraphicsSceneHoverEvent):
    """Forward hover-leave notifications to the parent strand item."""
    self._strand_item.hoverLeaveEvent(event)
# end def
def hoverMoveEvent(self, event: QGraphicsSceneHoverEvent):
    """Parses a :meth:`hoverMoveEvent`, calling the approproate tool
    method as necessary.
    """
    strand_item = self._strand_item
    # status bar shows "<vh_num>[<idx>]  length: <oligo length>"
    msg = "%d[%d]\tlength: %d" % (strand_item.idNum(),
                                  self.idx(),
                                  strand_item._model_strand.oligo().length())
    self.partItem().updateStatusBar(msg)
    prefix = self._getActiveTool().methodPrefix()
    if prefix == 'createTool':
        return strand_item.createToolHoverMove(event, self.idx())
    elif prefix == 'addSeqTool':
        return self.addSeqToolHoverMove(event, self.idx())
def mouseMoveEvent(self, event: QGraphicsSceneMouseEvent):
    """Parses a :meth:`mouseMoveEvent`, calling the appropriate tool
    method as necessary. Updates ``_move_idx`` if it changed.
    """
    handler = getattr(self,
                      self._getActiveTool().methodPrefix() + "MouseMove",
                      None)
    if handler is None:
        return
    # convert the scene x-offset into a base index
    new_idx = int(floor((self.x() + event.pos().x()) / _BASE_WIDTH))
    if new_idx != self._move_idx:  # did we actually move?
        self._move_idx = new_idx
        handler(event.modifiers(), new_idx)
def customMouseRelease(self, event: QMouseEvent):
    """Parses a :meth:`mouseReleaseEvent` from view, calling the appropriate
    tool method as necessary. Deletes ``_move_idx`` if necessary.
    """
    handler = getattr(self,
                      self._getActiveTool().methodPrefix() + "MouseRelease",
                      None)
    if handler is not None:
        handler(event.modifiers(), event.pos().x())  # call tool method
    if hasattr(self, '_move_idx'):
        del self._move_idx
### TOOL METHODS ###
def modsToolMousePress(self, modifiers: Qt.KeyboardModifiers,
                       event: QGraphicsSceneMouseEvent,
                       idx: int):
    """Apply the mods tool's active modification to the clicked strand
    at base index ``idx``."""
    self._getActiveTool().applyMod(self._strand_item._model_strand, idx)
# end def
def breakToolMouseRelease(self, modifiers: Qt.KeyboardModifiers,
                          x):
    """Shift-click to merge without switching back to select tool."""
    if modifiers & Qt.ShiftModifier:
        self._strand_item._model_strand.merge(self.idx())
# end def
def eraseToolMousePress(self, modifiers: Qt.KeyboardModifiers,
                        event: QGraphicsSceneMouseEvent,
                        idx: int):
    """Erase the strand."""
    strand = self._strand_item._model_strand
    strand.strandSet().removeStrand(strand)
# end def
def insertionToolMousePress(self, modifiers: Qt.KeyboardModifiers,
                            event: QGraphicsSceneMouseEvent,
                            idx: int):
    """Add an insert to the strand if possible."""
    # +1 inserts a single extra base at idx
    self._strand_item._model_strand.addInsertion(idx, 1)
# end def
def paintToolMousePress(self, modifiers: Qt.KeyboardModifiers,
                        event: QGraphicsSceneMouseEvent,
                        idx: int):
    """Recolor the clicked strand's whole oligo with the color panel's
    current color (or the shift-alternate color when Shift is held)."""
    panel = self.window().path_color_panel
    if qApp.keyboardModifiers() & Qt.ShiftModifier:
        color = panel.shiftColorName()
    else:
        color = panel.colorName()
    self._strand_item._model_strand.oligo().applyColor(color)
# end def
def addSeqToolMousePress(self, modifiers: Qt.KeyboardModifiers,
                         event: QGraphicsSceneMouseEvent,
                         idx: int):
    """Apply the add-sequence tool's sequence to this strand's oligo."""
    self._getActiveTool().applySequence(
        self._strand_item._model_strand.oligo())
# end def
def addSeqToolHoverMove(self, event: QGraphicsSceneHoverEvent,
                        idx: int):
    """Forward hover motion to the add-sequence tool, passing the
    strand's drawing direction."""
    self._getActiveTool().hoverMove(self, event, flag=self._isdrawn5to3)
# end def
def addSeqToolHoverLeave(self, event: QGraphicsSceneHoverEvent):
    """Forward hover-leave notifications to the add-sequence tool."""
    active_tool = self._getActiveTool()
    active_tool.hoverLeaveEvent(event)
# end def
def createToolHoverMove(self, idx: int):
    """Update the floating crossover preview while hovering, if a
    floating crossover drag is currently in progress."""
    active_tool = self._getActiveTool()
    if active_tool.isFloatingXoverBegin():
        return
    active_tool.floatingXover().updateFloatingFromStrandItem(
        self._strand_item._virtual_helix_item,
        self._strand_item._model_strand,
        idx)
# end def
def createToolMousePress(self, modifiers: Qt.KeyboardModifiers,
                         event: QGraphicsSceneMouseEvent,
                         idx: int):
    """Begin or complete a floating crossover at base index ``idx``."""
    m_strand = self._strand_item._model_strand
    vhi = self._strand_item._virtual_helix_item
    active_tool = self._getActiveTool()
    if active_tool.isFloatingXoverBegin():
        # first click: anchor the floating xover -- unless the click
        # landed on the strand's 5' end, which cannot start one
        if m_strand.idx5Prime() == idx:
            return
        active_tool.floatingXover().updateBase(vhi, m_strand, idx)
        active_tool.setFloatingXoverBegin(False)
    else:
        # second click: reset tool state and install the crossover
        active_tool.setFloatingXoverBegin(True)
        active_tool.attemptToCreateXover(vhi, m_strand, idx)
# end def
def selectToolMousePress(self, modifiers: Qt.KeyboardModifiers,
                         event: QGraphicsSceneMouseEvent,
                         idx: int):
    """Set the allowed drag bounds for use by selectToolMouseMove.

    Also adds this endpoint to the view's selection group (clearing the
    prior selection unless the Meta key is held) when the current
    selection filters permit it, then forwards the press to the group.
    """
    # print("%s.%s [%d]" % (self, util.methodName(), self.idx()))
    self._low_drag_bound, self._high_drag_bound = self._strand_item._model_strand.getResizeBounds(self.idx())
    s_i = self._strand_item
    viewroot = s_i.viewroot()
    current_filter_set = viewroot.selectionFilterSet()
    # proceed only if every strand filter AND this item's own filter
    # name are enabled in the view's selection filter set
    if (all(f in current_filter_set for f in s_i.strandFilter()) and self.FILTER_NAME in current_filter_set):
        selection_group = viewroot.strandItemSelectionGroup()
        mod = Qt.MetaModifier
        if not (modifiers & mod):
            # without Meta: replace the current selection instead of extending
            selection_group.clearSelection(False)
        selection_group.setSelectionLock(selection_group)
        selection_group.pendToAdd(self)
        selection_group.processPendingToAddList()
        return selection_group.mousePressEvent(event)
# end def
def selectToolMouseMove(self, modifiers: Qt.KeyboardModifiers, idx: int):
    """
    Given a new index (pre-validated as different from the prev index),
    calculate the new x coordinate for self, move there, and notify the
    parent strandItem to redraw its horizontal line.

    NOTE(review): the body is currently empty (a no-op). Confirm whether
    the drag behavior described above was moved elsewhere or is missing.
    """
    # end def
def selectToolMouseRelease(self, modifiers: Qt.KeyboardModifiers, x):
    """
    If the positional-calculated idx differs from the model idx, it means
    we have moved and should notify the model to resize.
    If the mouse event had a key modifier, perform special actions:
    shift = attempt to merge with a neighbor
    alt = extend to max drag bound
    """
    if modifiers & Qt.ShiftModifier:
        # deselect, restore parenting, then ask the model to merge
        self.setSelected(False)
        self.restoreParent()
        self._strand_item._model_strand.merge(self.idx())
# end def
def skipToolMousePress(self, modifiers: Qt.KeyboardModifiers,
                       event: QGraphicsSceneMouseEvent,
                       idx: int):
    """Add a skip (an insertion of length -1) to the strand if possible."""
    self._strand_item._model_strand.addInsertion(idx, -1)
# end def
def restoreParent(self, pos: QPointF = None):
    """
    Required to restore parenting and positioning in the partItem
    """
    self.tempReparent(pos=pos)  # maps the position back under the helix item
    self.setSelectedColor(False)
    self.setSelected(False)
# end def
def tempReparent(self, pos: QPointF = None):
    """Reparent onto the virtual helix item while preserving the item's
    scene position (or an explicitly supplied scene position)."""
    vh_item = self._strand_item.virtualHelixItem()
    scene_pos = self.scenePos() if pos is None else pos
    self.setParentItem(vh_item)
    self.setPos(vh_item.mapFromScene(scene_pos))
# end def
def setSelectedColor(self, use_default: bool):
    """Recolor this endpoint's brush.

    Args:
        use_default: when True, paint with the global selection color;
            otherwise use the oligo's own color (half-alpha when the
            oligo is flagged for highlighting).
    """
    if use_default:  # idiomatic truth test instead of '== True'
        color = getColorObj(styles.SELECTED_COLOR)
    else:
        oligo = self._strand_item.strand().oligo()
        if oligo.shouldHighlight():
            color = getColorObj(oligo.getColor(), alpha=128)
        else:
            color = getColorObj(oligo.getColor())
    brush = self.brush()
    brush.setColor(color)
    self.setBrush(brush)
# end def
def updateHighlight(self, brush: QBrush):
    """Apply *brush* unless this item is currently selected (the
    selection color takes precedence)."""
    if self.isSelected():
        return
    self.setBrush(brush)
# end def
def itemChange(self, change: QGraphicsItem.GraphicsItemChange,
               value: Any) -> bool:
    """Used for selection of the :class:`EndpointItem`
    Args:
        change: parameter that is changing
        value : new value whose type depends on the ``change`` argument
    Returns:
        If the change is a ``QGraphicsItem.ItemSelectedChange``::
            ``True`` if selected, other ``False``
        Otherwise default to :meth:`QGraphicsPathItem.itemChange()` result
    """
    # for selection changes test against QGraphicsItem.ItemSelectedChange
    # intercept the change instead of the has changed to enable features.
    if change == QGraphicsItem.ItemSelectedChange and self.scene():
        active_tool = self._getActiveTool()
        if str(active_tool) == "select_tool":
            s_i = self._strand_item
            viewroot = s_i.viewroot()
            current_filter_set = viewroot.selectionFilterSet()
            selection_group = viewroot.strandItemSelectionGroup()
            # only add if the selection_group is not locked out
            if value == True and self.FILTER_NAME in current_filter_set:
                if all(f in current_filter_set for f in s_i.strandFilter()):
                    # queue for addition unless already grouped and selected
                    if self.group() != selection_group or not self.isSelected():
                        selection_group.pendToAdd(self)
                        selection_group.setSelectionLock(selection_group)
                        self.setSelectedColor(True)
                        return True
                    else:
                        return False
                # end if
            elif value == True:
                # don't select: the filter set excludes this item's filter name
                return False
            else:
                # Deselect
                # print("deselect ep")
                # Check if strand is being added to the selection group still
                if not selection_group.isPending(self._strand_item):
                    selection_group.pendToRemove(self)
                    self.tempReparent()
                    self.setSelectedColor(False)
                    return False
                else:  # don't deselect, because the strand is still selected
                    return True
            # end else
        # end if
        elif str(active_tool) == "paint_tool":
            s_i = self._strand_item
            viewroot = s_i.viewroot()
            current_filter_set = viewroot.selectionFilterSet()
            if all(f in current_filter_set for f in s_i.strandFilter()):
                # NOTE(review): first click appears to prime the tool's
                # "macro" state before painting -- confirm intent
                if not active_tool.isMacrod():
                    active_tool.setMacrod()
                self.paintToolMousePress(None, None, None)
        # end elif
        return False
    # end if
    return QGraphicsPathItem.itemChange(self, change, value)
# end def
def modelDeselect(self, document: DocT):
    """A strand is selected based on whether its low or high endpoints
    are selected. this value is a tuple ``(is_low, is_high)`` of booleans

    Clears this cap's half of the tuple and updates the document's
    selection record accordingly, then restores parenting.
    """
    strand = self._strand_item.strand()
    is_selected = document.isModelStrandSelected(strand)
    if is_selected:
        low_val, high_val = document.getSelectedStrandValue(strand)
    else:
        low_val, high_val = False, False
    if self.cap_type == 'low':
        out_value = (False, high_val)
    else:
        out_value = (low_val, False)
    if not out_value[0] and not out_value[1] and is_selected:
        # neither end remains selected: drop the strand entirely
        document.removeStrandFromSelection(strand)
    elif out_value[0] or out_value[1]:
        document.addStrandToSelection(strand, out_value)
    self.restoreParent()
# end def
def modelSelect(self, document: DocT):
    """A strand is selected based on whether its low or high endpoints
    are selected. this value is a tuple ``(is_low, is_high)`` of booleans

    Marks this cap's half of the tuple True, updates the visual state,
    and records the selection on the document.
    """
    strand = self._strand_item.strand()
    if document.isModelStrandSelected(strand):
        low_val, high_val = document.getSelectedStrandValue(strand)
    else:
        low_val, high_val = False, False
    out_value = (True, high_val) if self.cap_type == 'low' else (low_val, True)
    self.setSelected(True)
    self.setSelectedColor(True)
    document.addStrandToSelection(strand, out_value)
# end def
def paint(self, painter: QPainter,
          option: QStyleOptionGraphicsItem,
          widget: QWidget):
    """Draw the cap path with this item's current pen and brush."""
    painter.setBrush(self.brush())
    painter.setPen(self.pen())
    painter.drawPath(self.path())
# end def
|
"""This module contains a object that represents Tests for Telegram
InlineQueryResultVideo"""
import sys
if sys.version_info[0:2] == (2, 6):
import unittest2 as unittest
else:
import unittest
sys.path.append('.')
import telegram
from tests.base import BaseTest
class InlineQueryResultVideoTest(BaseTest, unittest.TestCase):
    """This object represents Tests for Telegram InlineQueryResultVideo."""

    def setUp(self):
        """Build the reference attribute values and the matching JSON dict."""
        self.id = 'id'
        self.type = 'video'
        self.video_url = 'video url'
        self.mime_type = 'mime type'
        self.video_width = 10
        self.video_height = 15
        self.video_duration = 15
        self.thumb_url = 'thumb url'
        self.title = 'title'
        self.caption = 'caption'
        self.description = 'description'
        self.input_message_content = telegram.InputTextMessageContent('input_message_content')
        self.reply_markup = telegram.InlineKeyboardMarkup(
            [[telegram.InlineKeyboardButton('reply_markup')]])
        self.json_dict = {
            'type': self.type,
            'id': self.id,
            'video_url': self.video_url,
            'mime_type': self.mime_type,
            'video_width': self.video_width,
            'video_height': self.video_height,
            'video_duration': self.video_duration,
            'thumb_url': self.thumb_url,
            'title': self.title,
            'caption': self.caption,
            'description': self.description,
            'input_message_content': self.input_message_content.to_dict(),
            'reply_markup': self.reply_markup.to_dict(),
        }

    def test_video_de_json(self):
        """de_json must reproduce every scalar field and nested object."""
        video = telegram.InlineQueryResultVideo.de_json(self.json_dict)
        for attr in ('type', 'id', 'video_url', 'mime_type', 'video_width',
                     'video_height', 'video_duration', 'thumb_url', 'title',
                     'description', 'caption'):
            self.assertEqual(getattr(video, attr), getattr(self, attr))
        self.assertDictEqual(video.input_message_content.to_dict(),
                             self.input_message_content.to_dict())
        self.assertDictEqual(video.reply_markup.to_dict(),
                             self.reply_markup.to_dict())

    def test_video_to_json(self):
        """A de_json'd object must serialize to valid JSON."""
        video = telegram.InlineQueryResultVideo.de_json(self.json_dict)
        self.assertTrue(self.is_json(video.to_json()))

    def test_video_to_dict(self):
        """Round-trip through de_json/to_dict must yield the original dict."""
        video = telegram.InlineQueryResultVideo.de_json(self.json_dict).to_dict()
        self.assertTrue(self.is_dict(video))
        self.assertDictEqual(self.json_dict, video)
# allow running this test module directly (outside a test runner)
if __name__ == '__main__':
    unittest.main()
|
import matplotlib.pyplot as plt
import numpy as np
from matplotlib.lines import lineStyles
Light_cnames={'mistyrose':'#FFE4E1','navajowhite':'#FFDEAD','seashell':'#FFF5EE','papayawhip':'#FFEFD5','blanchedalmond':'#FFEBCD','white':'#FFFFFF','mintcream':'#F5FFFA','antiquewhite':'#FAEBD7','moccasin':'#FFE4B5','ivory':'#FFFFF0','lightgoldenrodyellow':'#FAFAD2','lightblue':'#ADD8E6','floralwhite':'#FFFAF0','ghostwhite':'#F8F8FF','honeydew':'#F0FFF0','linen':'#FAF0E6','snow':'#FFFAFA','lightcyan':'#E0FFFF','cornsilk':'#FFF8DC','bisque':'#FFE4C4','aliceblue':'#F0F8FF','gainsboro':'#DCDCDC','lemonchiffon':'#FFFACD','lightyellow':'#FFFFE0','lavenderblush':'#FFF0F5','whitesmoke':'#F5F5F5','beige':'#F5F5DC','azure':'#F0FFFF','oldlace':'#FDF5E6'}
def plot10seperate():
mons=["201603","201604","201605","201606","201607","201608","201609","201610","201611","201612","201701","201702","201703","201704","201705","201706"]
days=['01','02','03','04','05','06','07','08','09','10','11','12','13','14','15','16','17','18','19','20','21','22','23','24','25','26','27','28','29','30','31']
rootpath="F:/workspace/git/TranWeatherProject/data/mesonet_data/"
for mon in mons:
for day in days:
print mon+day
fileName=rootpath+mon+day+".txt"
day_data=[]
with open(fileName,"r") as df:
for line in df.readlines():
terms=line.strip().split()
sta_name=terms[0]
data=map(float,terms[1:])
day_data.append((sta_name,mon+day,data))
X=[(i*5.0/60.0) for i in range(1,len(day_data[0][2]),1)]
fig=plt.figure(1)
fig.add_subplot(10,1,1)
plt.plot(X,day_data[0][2],'b*-',linewidth='2.0', markersize=5,label='Temperature')
plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3,ncol=1, mode="expand", borderaxespad=0.)
#plt.plot([0.2,0.1,0.0],[0.5,0.5,0.5])
plt.ylim([-20.0,60.0])
plt.xlabel('time Period')
#plt.xlim([0.2,0.0])
plt.legend(loc='best',fontsize=8)
plt.title(day_data[0][0]+" Station Date: "+mon+day +"Temperature")
fig.add_subplot(10,1,2)
plt.plot(X,day_data[1][2],'b*-',linewidth='2.0', markersize=5,label='Temperature')
plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3,ncol=1, mode="expand", borderaxespad=0.)
#plt.plot([0.2,0.1,0.0],[0.5,0.5,0.5])
plt.ylim([-20.0,60.0])
plt.xlabel('time Period')
#plt.xlim([0.2,0.0])
plt.legend(loc='best',fontsize=8)
plt.title(day_data[1][0]+" Station Date: "+mon+day +"Temperature")
fig.add_subplot(10,1,3)
plt.plot(X,day_data[2][2],'b*-',linewidth='2.0', markersize=5,label='Temperature')
plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3,ncol=1, mode="expand", borderaxespad=0.)
#plt.plot([0.2,0.1,0.0],[0.5,0.5,0.5])
plt.ylim([-20.0,60.0])
plt.xlabel('time Period')
#plt.xlim([0.2,0.0])
plt.legend(loc='best',fontsize=8)
plt.title(day_data[2][0]+" Station Date: "+mon+day +"Temperature")
fig.add_subplot(10,1,4)
plt.plot(X,day_data[3][2],'b*-',linewidth='2.0', markersize=5,label='Temperature')
plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3,ncol=1, mode="expand", borderaxespad=0.)
#plt.plot([0.2,0.1,0.0],[0.5,0.5,0.5])
plt.ylim([-20.0,60.0])
plt.xlabel('time Period')
#plt.xlim([0.2,0.0])
plt.legend(loc='best',fontsize=8)
plt.title(day_data[3][0]+" Station Date: "+mon+day +"Temperature")
fig.add_subplot(10,1,5)
plt.plot(X,day_data[4][2],'b*-',linewidth='2.0', markersize=5,label='Temperature')
plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3,ncol=1, mode="expand", borderaxespad=0.)
#plt.plot([0.2,0.1,0.0],[0.5,0.5,0.5])
plt.ylim([-20.0,60.0])
plt.xlabel('time Period')
#plt.xlim([0.2,0.0])
plt.legend(loc='best',fontsize=8)
plt.title(day_data[4][0]+" Station Date: "+mon+day +"Temperature")
fig.add_subplot(10,1,6)
plt.plot(X,day_data[5][2],'b*-',linewidth='2.0', markersize=5,label='Temperature')
plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3,ncol=1, mode="expand", borderaxespad=0.)
#plt.plot([0.2,0.1,0.0],[0.5,0.5,0.5])
plt.ylim([-20.0,60.0])
plt.xlabel('time Period')
#plt.xlim([0.2,0.0])
plt.legend(loc='best',fontsize=8)
plt.title(day_data[5][0]+" Station Date: "+mon+day +"Temperature")
fig.add_subplot(10,1,7)
plt.plot(X,day_data[6][2],'b*-',linewidth='2.0', markersize=5,label='Temperature')
plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3,ncol=1, mode="expand", borderaxespad=0.)
#plt.plot([0.2,0.1,0.0],[0.5,0.5,0.5])
plt.ylim([-20.0,60.0])
plt.xlabel('time Period')
#plt.xlim([0.2,0.0])
plt.legend(loc='best',fontsize=8)
plt.title(day_data[6][0]+" Station Date: "+mon+day +"Temperature")
fig.add_subplot(10,1,8)
plt.plot(X,day_data[7][2],'b*-',linewidth='2.0', markersize=5,label='Temperature')
plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3,ncol=1, mode="expand", borderaxespad=0.)
#plt.plot([0.2,0.1,0.0],[0.5,0.5,0.5])
plt.ylim([-20.0,60.0])
plt.xlabel('time Period')
#plt.xlim([0.2,0.0])
plt.legend(loc='best',fontsize=8)
plt.title(day_data[7][0]+" Station Date: "+mon+day +"Temperature")
fig.add_subplot(10,1,9)
plt.plot(X,day_data[8][2],'b*-',linewidth='2.0', markersize=5,label='Temperature')
plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3,ncol=1, mode="expand", borderaxespad=0.)
#plt.plot([0.2,0.1,0.0],[0.5,0.5,0.5])
plt.ylim([-20.0,60.0])
plt.xlabel('time Period From 00:00am ~23:59')
#plt.xlim([0.2,0.0])
plt.legend(loc='best',fontsize=8)
plt.title(day_data[8][0]+" Station Date: "+mon+day +"Temperature")
fig.add_subplot(10,1,10)
plt.plot(X,day_data[9][2],'b*-',linewidth='2.0', markersize=5,label='Temperature')
plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3,ncol=1, mode="expand", borderaxespad=0.)
#plt.plot([0.2,0.1,0.0],[0.5,0.5,0.5])
plt.ylim([-20.0,60.0])
plt.xlabel('time Period')
#plt.xlim([0.2,0.0])
plt.legend(loc='best',fontsize=8)
plt.title(day_data[9][0]+" Station Date: "+mon+day +"Temperature")
plt.tight_layout(pad=0.4, w_pad=0.5, h_pad=1.0)
plt.show()
fig.savefig('F:/workspace/git/TranWeatherProject/outputs/mesonetPlots/'+str(mon+day)+'.png')
plt.close()
import os
def plotSignle():
mons=["201603","201604","201605","201606","201607","201608","201609"]
#mons=["201604"]
#mons=["201609"]
days=['01','02','03','04','05','06','07','08','09','10','11','12','13','14','15','16','17','18','19','20','21','22','23','24','25','26','27','28','29','30','31']
#days=[""]
sta_names={0:"BATA",1:"SBRI",2:"WATE",3:"JORD",4:"CSQR",5:"WEST",6:"COLD",7:"SPRA",8:"COBL",9:"STEP"}
var_type="precip"
rootpath="F:/workspace/git/Graph-MP/data/mesonet_data/"+var_type+"/"
for mon in mons:
for day in days:
fileName=rootpath+mon+day+".txt"
print fileName
day_data=[]
if not os.path.exists(fileName):
continue
with open(fileName,"r") as df:
for line in df.readlines():
terms=line.strip().split()
sta_name=terms[0]
data=map(float,terms[1:])
day_data.append((sta_name,mon+day,data))
X=[i for i in range(0,len(day_data[0][2]))]
label=[(str(i)+"\n"+str(i*5/60)+"h") for i in range(0,len(day_data[0][2])+1,12)]
print sta_names[int(day_data[0][0])]
fig=plt.figure(1)
plt.plot(X,day_data[0][2],'b-',linewidth='1.0', markersize=5,label=sta_names[int(day_data[0][0])]+day_data[0][0])
plt.plot(X,day_data[1][2],'r-',linewidth='1.0', markersize=5,label=str(sta_names[int(day_data[1][0])])+day_data[1][0])
plt.plot(X,day_data[2][2],'k-',linewidth='1.0', markersize=5,label=str(sta_names[int(day_data[2][0])])+day_data[2][0])
plt.plot(X,day_data[3][2],'g-',linewidth='1.0', markersize=5,label=str(sta_names[int(day_data[3][0])])+day_data[3][0])
plt.plot(X,day_data[4][2],'y-',linewidth='1.0', markersize=5,label=str(sta_names[int(day_data[4][0])])+day_data[4][0])
plt.plot(X,day_data[5][2],'c-',linewidth='1.0', markersize=5,label=str(sta_names[int(day_data[5][0])])+day_data[5][0])
plt.plot(X,day_data[6][2],'m-',linewidth='1.0', markersize=5,label=str(sta_names[int(day_data[6][0])])+day_data[6][0])
plt.plot(X,day_data[7][2],color ='#B47CC7',linewidth='1.0', markersize=5,label=str(sta_names[int(day_data[7][0])])+day_data[7][0])
plt.plot(X,day_data[8][2],color='#FBC15E',linewidth='1.0', markersize=5,label=str(sta_names[int(day_data[8][0])])+day_data[8][0])
plt.plot(X,day_data[9][2],color='#e5ee38',linewidth='1.0', markersize=5,label=str(sta_names[int(day_data[9][0])])+day_data[9][0])
plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3,ncol=1, mode="expand", borderaxespad=0.)
plt.plot([0.2,0.1,0.0],[0.5,0.5,0.5])
if var_type=="wind":
plt.ylim([-5.0,70.0])
plt.ylabel('Avg. Wind Speed(mph)')
plt.title(mon+day +"Every 5min Avg. Wind")
elif type=="temp":
plt.ylim([-10.0,100.0])
plt.ylabel('Temperature(F)')
plt.title(mon+day +"Temperature")
else:
plt.ylim([-1.0,2.0])
plt.ylabel('Precipitation Est (Inch)')
plt.title(mon+day +"Precipitation")
#plt.xticks(np.arange(min(X), max(X)+2, 12.0))
print len(X)
plt.xticks(np.arange(min(X), max(X)+2, 12.0),label)
plt.tick_params(axis='both', which='major', labelsize=7)
plt.xlabel('Time from 00:00 ~23:59,each 5min')
#plt.xlim([0.2,0.0])
plt.legend(loc='best',fontsize=8)
plt.grid()
#plt.tight_layout(pad=0.4, w_pad=0.5, h_pad=1.0)
#plt.show()
fig.savefig('F:/workspace/git/Graph-MP/outputs/mesonetPlots/'+var_type+'_plots/'+str(mon+day)+'.png')
plt.close()
def expAvg(fileName):
    """Read a whitespace-separated per-line stats file.

    Each line holds at least four fields; columns 0, 1 and 3 are parsed
    as floats (avg, min and max respectively -- column 2 is skipped).

    Returns:
        (expAvgs, expMin, expMax): three parallel lists of floats.
    """
    expAvgs=[]
    expMin=[]
    expMax=[]
    with open(fileName,"r") as oF:
        for line in oF.readlines():
            # split once per line instead of three times (original re-split
            # the same line for every column)
            terms=line.strip().split()
            expAvgs.append(float(terms[0]))
            expMin.append(float(terms[1]))
            expMax.append(float(terms[3]))
    return expAvgs,expMin,expMax
def plotCaseDays():
dates=["20160301","20160302","20160308","20160309","20160312","20160313","20160324","20160325","20160328","20160405","20160412","20160419","20160421","20160514","20160529","20160621","20160628","20160813","20160911","20160922"]
mons=["201603","201604","201605","201606","201607","201608","201609"]
days=['01','02','03','04','05','06','07','08','09','10','11','12','13','14','15','16','17','18','19','20','21','22','23','24','25','26','27','28','29','30','31']
sta_names={0:"BATA",1:"SBRI",2:"WATE",3:"JORD",4:"CSQR",5:"WEST",6:"COLD",7:"SPRA",8:"COBL",9:"STEP"}
var_type="temp"
rootpath="F:/workspace/git/TranWeatherProject/data/mesonet_data/"+var_type+"/"
#expRoot="F:/workspace/git/TranWeatherProject/data/mesonet_data/mesonetExpData/statExpData/"
for mon in mons:
for day in days:
date=str(mon+day)
#expAvgs=expAvg(expRoot+mon+day+".txt")
fileName=rootpath+mon+day+".txt"
print fileName
day_data=[]
if not os.path.exists(fileName):
print "File Not Found",fileName
continue
with open(fileName,"r") as df:
for line in df.readlines():
terms=line.strip().split()
sta_name=terms[0]
data=map(float,terms[1:])
day_data.append((sta_name,mon+day,data))
X=[i for i in range(0,len(day_data[0][2]))]
label=[(str(i)+"\n"+str(i*5/60)+"h") for i in range(0,len(day_data[0][2])+1,12)]
labelY=[str(i) for i in range(0,100+1,5)]
print sta_names[int(day_data[0][0])]
fig=plt.figure(1)
plt.plot(X,day_data[0][2],'b-',linewidth='2.0', markersize=5,label=sta_names[int(day_data[0][0])]+day_data[0][0])
plt.plot(X,day_data[1][2],'r-',linewidth='2.0', markersize=5,label=str(sta_names[int(day_data[1][0])])+day_data[1][0])
plt.plot(X,day_data[2][2],'k-',linewidth='2.0', markersize=5,label=str(sta_names[int(day_data[2][0])])+day_data[2][0])
plt.plot(X,day_data[3][2],'g-',linewidth='2.0', markersize=5,label=str(sta_names[int(day_data[3][0])])+day_data[3][0])
plt.plot(X,day_data[4][2],'y-',linewidth='2.0', markersize=5,label=str(sta_names[int(day_data[4][0])])+day_data[4][0])
plt.plot(X,day_data[5][2],'c-',linewidth='2.0', markersize=5,label=str(sta_names[int(day_data[5][0])])+day_data[5][0])
plt.plot(X,day_data[6][2],'m-',linewidth='2.0', markersize=5,label=str(sta_names[int(day_data[6][0])])+day_data[6][0])
plt.plot(X,day_data[7][2],color ='#B47CC7',linewidth='2.0', markersize=5,label=str(sta_names[int(day_data[7][0])])+day_data[7][0])
plt.plot(X,day_data[8][2],color='#FBC15E',linewidth='2.0', markersize=5,label=str(sta_names[int(day_data[8][0])])+day_data[8][0])
plt.plot(X,day_data[9][2],color='#e5ee38',linewidth='2.0', markersize=5,label=str(sta_names[int(day_data[9][0])])+day_data[9][0])
plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3,ncol=1, mode="expand", borderaxespad=0.)
plt.plot([0.2,0.1,0.0],[0.5,0.5,0.5])
if var_type=="wind":
#plt.ylim([-5.0,70.0])
plt.ylabel('Avg. Wind Speed(mph)')
plt.title(mon+day +"Every 5min Avg. Wind")
else:
plt.ylim([-10.0,100.0])
plt.ylabel('Temperature(F)')
plt.title(mon+day +"Temperature")
#plt.xticks(np.arange(min(X), max(X)+2, 12.0))
plt.xticks(np.arange(min(X), max(X)+2, 12.0),label)
#plt.yticks(np.arange(0, 100, 5.0),labelY)
plt.tick_params(axis='both', which='major', labelsize=7)
plt.xlabel('Time from 00:00 ~23:59,every 5min')
#plt.xlim([0.2,0.0])
plt.legend(loc='best',fontsize=8)
plt.grid()
#plt.tight_layout(pad=0.4, w_pad=0.5, h_pad=1.0)
#plt.show()
fig.savefig('F:/workspace/git/Graph-MP/outputs/mesonetPlots/'+var_type+'_CaseStudy/'+str(mon+day)+'.png', dpi=300)
plt.close()
def plotSingleDays():
fileName="F:/workspace/git/Graph-MP/data/mesonet_data/test_4.txt"
sta_names={0:"BATA",1:"SBRI",2:"WATE",3:"JORD",4:"CSQR",5:"WEST",6:"COLD",7:"SPRA",8:"COBL",9:"STEP"}
day_data=[]
with open(fileName,"r") as df:
for line in df.readlines():
terms=line.strip().split()
sta_name=terms[0]
data=map(float,terms[1:288])
day_data.append((sta_name,'201603001',data))
X=[i for i in range(0,len(day_data[0][2]))]
label=[(str(i)+"\n"+str(i*5/60)+"h") for i in range(0,len(day_data[0][2])+1,12)]
labelY=[str(i) for i in range(0,100+1,5)]
print sta_names[int(day_data[0][0])]
fig=plt.figure(1)
plt.plot(X,day_data[0][2],'b-',linewidth='1.0', markersize=5,label=sta_names[int(day_data[0][0])]+day_data[0][0])
plt.plot(X,day_data[1][2],'r-',linewidth='1.0', markersize=5,label=str(sta_names[int(day_data[1][0])])+day_data[1][0])
plt.plot(X,day_data[2][2],'k-',linewidth='1.0', markersize=5,label=str(sta_names[int(day_data[2][0])])+day_data[2][0])
plt.plot(X,day_data[3][2],'g-',linewidth='1.0', markersize=5,label=str(sta_names[int(day_data[3][0])])+day_data[3][0])
plt.plot(X,day_data[4][2],'y-',linewidth='1.0', markersize=5,label=str(sta_names[int(day_data[4][0])])+day_data[4][0])
plt.plot(X,day_data[5][2],'c-',linewidth='1.0', markersize=5,label=str(sta_names[int(day_data[5][0])])+day_data[5][0])
plt.plot(X,day_data[6][2],'m-',linewidth='1.0', markersize=5,label=str(sta_names[int(day_data[6][0])])+day_data[6][0])
plt.plot(X,day_data[7][2],color ='#B47CC7',linewidth='1.0', markersize=5,label=str(sta_names[int(day_data[7][0])])+day_data[7][0])
plt.plot(X,day_data[8][2],color='#FBC15E',linewidth='1.0', markersize=5,label=str(sta_names[int(day_data[8][0])])+day_data[8][0])
plt.plot(X,day_data[9][2],color='#e5ee38',linewidth='1.0', markersize=5,label=str(sta_names[int(day_data[9][0])])+day_data[9][0])
plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3,ncol=1, mode="expand", borderaxespad=0.)
plt.plot([0.2,0.1,0.0],[0.5,0.5,0.5])
plt.ylim([-10.0,100.0])
plt.ylabel('Temperature(F)')
plt.title('201603001 ' +"Temperature")
#plt.xticks(np.arange(min(X), max(X)+2, 12.0))
plt.xticks(np.arange(min(X), max(X)+2, 12.0),label)
#plt.yticks(np.arange(0, 100, 5.0),labelY)
plt.tick_params(axis='both', which='major', labelsize=7)
plt.xlabel('Time from 00:00 ~23:59,each 5min')
#plt.xlim([0.2,0.0])
plt.legend(loc='best',fontsize=8)
plt.grid()
#plt.tight_layout(pad=0.4, w_pad=0.5, h_pad=1.0)
#plt.show()
fig.savefig('F:/workspace/git/Graph-MP/data/mesonet_data/201603001_4.png', dpi=300)
plt.close()
import time
def loadTop(fileName):
    """Load the leading ranked results from a TopK result file.

    Each line is space-separated: "<rank> <idx,idx,...> <date> <lo,hi>".
    Returns a list of (rank, station_indices, date, bounds) tuples.
    """
    results = []
    with open(fileName, "r") as rF:
        for i, line in enumerate(rF.readlines()):
            terms = line.strip().split(" ")
            parsed = (int(terms[0]),
                      map(int, terms[1].split(",")),
                      terms[2],
                      map(int, terms[3].split(",")))
            results.append(parsed)
            # NOTE: yields 21 entries (i == 0..20) because the break fires
            # only after appending when i > 19 -- preserved as-is.
            if i > 19:
                break
    return results
def plotCaseDaysSingleStation():
#dates=["20160301","20160302","20160308","20160309","20160312","20160313","20160324","20160325","20160328","20160405","20160412","20160419","20160421","20160514","20160529","20160621","20160628","20160813","20160911","20160922"]
vars=['i0','i1','i2','i3','i4','i5','i6','i7','i8','i9']
topResults=loadTop("F:/workspace/git/Graph-MP/outputs/mesonetPlots/multi_CaseStudy/CP/2/20multi_TopK_result-CP_baseMeanDiff_20_s_2_wMax_18_filter_TIncld_0.7_Top.txt")
for result in topResults:
dates=[]
top=result[0]+1
vals=result[1]
dates.append(result[2])
for i,var in enumerate(vars):
if i in vals:
exec "%s=%s"%(vars[i], 1)
else:
exec "%s=%s"%(vars[i], 0)
print i0,i1,i2,i3,i4,i5,i6,i7,i8,i9
# i0=0
# i1=0
# i2=0
# i3=1
# i4=1
# i5=1
# i6=1
# i7=0
# i8=0
# i9=0
mons=["201603","201604","201605","201606","201607","201608","201609"]
days=['01','02','03','04','05','06','07','08','09','10','11','12','13','14','15','16','17','18','19','20','21','22','23','24','25','26','27','28','29','30','31']
sta_names={0:"BATA",1:"SBRI",2:"WATE",3:"JORD",4:"CSQR",5:"WEST",6:"COLD",7:"SPRA",8:"COBL",9:"STEP"}
var_type="wind"
rootpath="F:/workspace/git/Graph-MP/data/mesonet_data/"+var_type+"/"
rootpath2="F:/workspace/git/Graph-MP/data/mesonet_data/temp/"
rootpath3="F:/workspace/git/Graph-MP/data/mesonet_data/precip/"
#expRoot="F:/workspace/git/TranWeatherProject/data/mesonet_data/mesonetExpData/statExpData/"
for mon in mons:
for day in days:
date=str(mon+day)
if date not in dates:
#print "Not ",date
continue
#expAvgs=expAvg(expRoot+mon+day+".txt")
fileName=rootpath+mon+day+".txt"
fileName2=rootpath2+mon+day+".txt"
fileName3=rootpath3+mon+day+".txt"
print fileName
if not os.path.exists(fileName):
print "File Not Found",fileName
continue
if not os.path.exists(fileName2):
print "File Not Found",fileName2
continue
if not os.path.exists(fileName3):
print "File Not Found",fileName2
continue
day_data=[]
with open(fileName,"r") as df:
for line in df.readlines():
terms=line.strip().split()
sta_name=terms[0]
data=map(float,terms[1:])
day_data.append((sta_name,mon+day,data))
day_data2=[]
with open(fileName2,"r") as df2:
for line in df2.readlines():
terms=line.strip().split()
sta_name=terms[0]
data=map(float,terms[1:])
day_data2.append((sta_name,mon+day,data))
day_data3=[]
with open(fileName3,"r") as df3:
for line in df3.readlines():
terms=line.strip().split()
sta_name=terms[0]
data=map(float,terms[1:])
day_data3.append((sta_name,mon+day,data))
X=[i for i in range(0,len(day_data[0][2]))]
label=[(str(i)+"\n"+str(i*5/60)+"h") for i in range(0,len(day_data[0][2])+1,12)]
labelY=[str(i) for i in range(0,100+1,5)]
print sta_names[int(day_data[0][0])]
print day_data[i3][2]
fig=plt.figure(1)
if i0!=0:
plt.plot(X,day_data[0][2],'b-',linewidth='0.5', markersize=5,label='Wind '+sta_names[int(day_data[0][0])]+day_data[0][0])
if i1!=0:
plt.plot(X,day_data[1][2],'r-',linewidth='0.5', markersize=5,label=str(sta_names[int(day_data[1][0])])+day_data[1][0])
if i2!=0:
plt.plot(X,day_data[2][2],'k-',linewidth='0.5', markersize=5,label=str(sta_names[int(day_data[2][0])])+day_data[2][0])
if i3!=0:
plt.plot(X,day_data[3][2],'g-',linewidth='0.5', markersize=5,label=str(sta_names[int(day_data[3][0])])+day_data[3][0])
if i4!=0:
plt.plot(X,day_data[4][2],'y-',linewidth='0.5', markersize=5,label=str(sta_names[int(day_data[4][0])])+day_data[4][0])
if i5!=0:
plt.plot(X,day_data[5][2],'c-',linewidth='0.5', markersize=5,label=str(sta_names[int(day_data[5][0])])+day_data[5][0])
if i6!=0:
plt.plot(X,day_data[6][2],'m-',linewidth='0.5', markersize=5,label=str(sta_names[int(day_data[6][0])])+day_data[6][0])
if i7!=0:
plt.plot(X,day_data[7][2],color ='#B47CC7',linewidth='0.5', markersize=5,label=str(sta_names[int(day_data[7][0])])+day_data[7][0])
if i8!=0:
plt.plot(X,day_data[8][2],color='#FBC15E',linewidth='0.5', markersize=5,label=str(sta_names[int(day_data[8][0])])+day_data[8][0])
if i9!=0:
plt.plot(X,day_data[9][2],color='#e5ee38',linewidth='0.5', markersize=5,label=str(sta_names[int(day_data[9][0])])+day_data[9][0])
plt.axvline(x=result[3][0], ymin=-1.0, ymax=50.0,color='k',linestyle='--')
plt.axvline(x=result[3][1], ymin=-1.0, ymax=50.0,color='k',linestyle='--')
plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3,ncol=1, mode="expand", borderaxespad=0.)
plt.plot([0.2,0.1,0.0],[0.5,0.5,0.5])
plt.ylim([-1.0,50.0])
plt.title("Top"+str(result[0]+1)+" "+mon+day +"Wind")
#plt.xticks(np.arange(min(X), max(X)+2, 12.0))
plt.xticks(np.arange(min(X), max(X)+2, 12.0),label)
plt.yticks(np.arange(-1, 50, 5.0),labelY)
plt.tick_params(axis='both', which='major', labelsize=7)
plt.xlabel('Time from 00:00 ~23:59,each 5min')
plt.grid()
#plt.xlim([0.2,0.0])
plt.legend(loc='best',fontsize=8)
#plt.plot(X,day_data2[i][2],'r-',linewidth='1.0', markersize=5,label='Temp '+sta_names[int(day_data2[i][0])]+day_data2[i][0])
fig.savefig('F:/workspace/git/Graph-MP/outputs/mesonetPlots/multi_CaseStudy/mvPlots/'+str(top)+'_wind_'+str(mon+day)+'.png', dpi=300)
fig.clf()
fig=plt.figure(2)
if i0!=0:
plt.plot(X,day_data2[0][2],'b-',linewidth='0.5', markersize=5)
if i1!=0:
plt.plot(X,day_data2[1][2],'r-',linewidth='0.5', markersize=5)
if i2!=0:
plt.plot(X,day_data2[2][2],'k-',linewidth='0.5', markersize=5)
if i3!=0:
plt.plot(X,day_data2[3][2],'g-',linewidth='0.5', markersize=5)
if i4!=0:
plt.plot(X,day_data2[4][2],'y-',linewidth='0.5', markersize=5)
if i5!=0:
plt.plot(X,day_data2[5][2],'c-',linewidth='0.5', markersize=5)
if i6!=0:
plt.plot(X,day_data2[6][2],'m-',linewidth='0.5', markersize=5)
if i7!=0:
plt.plot(X,day_data2[7][2],color ='#B47CC7',linewidth='0.5', markersize=5)
if i8!=0:
plt.plot(X,day_data2[8][2],color='#FBC15E',linewidth='0.5', markersize=5)
if i9!=0:
plt.plot(X,day_data2[9][2],color='#e5ee38',linewidth='0.5', markersize=5)
# if var_type=="wind":
# plt.ylim([-1.0,50.0])
# plt.ylabel('Avg. Wind Speed(mph)')
# plt.title(mon+day +"Every 5min Avg. Wind")
# else:
# plt.ylim([-10.0,100.0])
# plt.ylabel('Temperature(F)')
# plt.title(mon+day +"Temperature")
plt.axvline(x=result[3][0], ymin=-10.0, ymax=100.0,color='k',linestyle='--')
plt.axvline(x=result[3][1], ymin=-10.0, ymax=100.0,color='k',linestyle='--')
plt.ylim([-10.0,100.0])
plt.title("Top"+str(result[0]+1)+" "+mon+day +"Temperature ")
#plt.xticks(np.arange(min(X), max(X)+2, 12.0))
plt.xticks(np.arange(min(X), max(X)+2, 12.0),label)
plt.yticks(np.arange(0, 100, 5.0),labelY)
plt.tick_params(axis='both', which='major', labelsize=7)
plt.xlabel('Time from 00:00 ~23:59,each 5min')
plt.grid()
#plt.xlim([0.2,0.0])
plt.legend(loc='best',fontsize=8)
#plt.tight_layout(pad=0.4, w_pad=0.5, h_pad=1.0)
#plt.show()
fig.savefig('F:/workspace/git/Graph-MP/outputs/mesonetPlots/multi_CaseStudy/mvPlots/'+str(top)+'_temp_'+str(mon+day)+'.png', dpi=300)
fig.clf()
fig=plt.figure(3)
if i0!=0:
plt.plot(X,day_data3[0][2],'b-',linewidth='0.5', markersize=5)
if i1!=0:
plt.plot(X,day_data3[1][2],'r-',linewidth='0.5', markersize=5)
if i2!=0:
plt.plot(X,day_data3[2][2],'k-',linewidth='0.5', markersize=5)
if i3!=0:
plt.plot(X,day_data3[3][2],'g-',linewidth='0.5', markersize=5)
if i4!=0:
plt.plot(X,day_data3[4][2],'y-',linewidth='0.5', markersize=5)
if i5!=0:
plt.plot(X,day_data3[5][2],'c-',linewidth='0.5', markersize=5)
if i6!=0:
plt.plot(X,day_data3[6][2],'m-',linewidth='0.5', markersize=5)
if i7!=0:
plt.plot(X,day_data3[7][2],color ='#B47CC7',linewidth='0.5', markersize=5)
if i8!=0:
plt.plot(X,day_data3[8][2],color='#FBC15E',linewidth='0.5', markersize=5)
if i9!=0:
plt.plot(X,day_data3[9][2],color='#e5ee38',linewidth='0.5', markersize=5)
# if var_type=="wind":
# plt.ylim([-1.0,50.0])
# plt.ylabel('Avg. Wind Speed(mph)')
# plt.title(mon+day +"Every 5min Avg. Wind")
# else:
# plt.ylim([-10.0,100.0])
# plt.ylabel('Temperature(F)')
# plt.title(mon+day +"Temperature")
plt.axvline(x=result[3][0], ymin=-0.2, ymax=2.0,color='k',linestyle='--')
plt.axvline(x=result[3][1], ymin=-0.2, ymax=2.0,color='k',linestyle='--')
plt.ylim([-0.2,2.0])
plt.title("Top"+str(result[0]+1)+" "+mon+day +"Precipitation ")
#plt.xticks(np.arange(min(X), max(X)+2, 12.0))
plt.xticks(np.arange(min(X), max(X)+2, 12.0),label)
#plt.yticks(np.arange(-0.2, 2.0, 0.5),labelY)
plt.tick_params(axis='both', which='major', labelsize=7)
plt.xlabel('Time from 00:00 ~23:59,each 5min')
plt.grid()
#plt.xlim([0.2,0.0])
plt.legend(loc='best',fontsize=8)
#plt.tight_layout(pad=0.4, w_pad=0.5, h_pad=1.0)
#plt.show()
fig.savefig('F:/workspace/git/Graph-MP/outputs/mesonetPlots/multi_CaseStudy/mvPlots/'+str(top)+'_precip_'+str(mon+day)+'.png', dpi=300)
fig.clf()
plt.close()
def plotAllDays():
root="F:/workspace/git/WeatherTransportationProject/"
#dates=["20160301","20160302","20160308","20160309","20160312","20160313","20160324","20160325","20160328","20160405","20160412","20160419","20160421","20160514","20160529","20160621","20160628","20160813","20160911","20160922"]
dates=[]
#"201603","201604","201605","201606","201607","201608"
mons=["201609","201610","201611","201612","201701","201702","201703","201704","201705","201706"]
days=['01','02','03','04','05','06','07','08','09','10','11','12','13','14','15','16','17','18','19','20','21','22','23','24','25','26','27','28','29','30','31']
sta_names={0:"BATA",1:"SBRI",2:"WATE",3:"JORD",4:"CSQR",5:"WEST",6:"COLD",7:"SPRA",8:"COBL",9:"STEP"}
var_types=["temp","temp9","press","wind","windDir","windMax","rh","rad"]
#var_types=["wind"]
for var_type in var_types:
rootpath=root+"data/mesonet_data/"+var_type+"/"
#expRoot="F:/workspace/git/Graph-MP/data/mesonet_data/mesonetExpData/statExpData/"
for mon in mons:
for day in days:
date=str(mon+day)
fileName=rootpath+mon+day+".txt"
print fileName
day_data=[]
if not os.path.exists(fileName):
print "File Not Found",fileName
continue
with open(fileName,"r") as df:
for line in df.readlines():
terms=line.strip().split()
sta_name=terms[0]
data=map(float,terms[1:])
day_data.append((sta_name,mon+day,data))
X=[i for i in range(0,len(day_data[0][2]))]
label=[(str(i)+"\n"+str(i*5/60)+"h") for i in range(0,len(day_data[0][2])+1,12)]
labelY=[str(i) for i in range(0,100+1,5)]
print sta_names[int(day_data[0][0])]
fig=plt.figure(1)
plt.plot(X,day_data[0][2],'b-',linewidth='1.5', markersize=5,label=sta_names[int(day_data[0][0])]+day_data[0][0])
plt.plot(X,day_data[1][2],'r-',linewidth='1.5', markersize=5,label=str(sta_names[int(day_data[1][0])])+day_data[1][0])
plt.plot(X,day_data[2][2],'k-',linewidth='1.5', markersize=5,label=str(sta_names[int(day_data[2][0])])+day_data[2][0])
plt.plot(X,day_data[3][2],'g-',linewidth='1.5', markersize=5,label=str(sta_names[int(day_data[3][0])])+day_data[3][0])
plt.plot(X,day_data[4][2],'y-',linewidth='1.5', markersize=5,label=str(sta_names[int(day_data[4][0])])+day_data[4][0])
plt.plot(X,day_data[5][2],'c-',linewidth='1.5', markersize=5,label=str(sta_names[int(day_data[5][0])])+day_data[5][0])
plt.plot(X,day_data[6][2],'m-',linewidth='1.5', markersize=5,label=str(sta_names[int(day_data[6][0])])+day_data[6][0])
plt.plot(X,day_data[7][2],color ='#B47CC7',linewidth='1.5', markersize=5,label=str(sta_names[int(day_data[7][0])])+day_data[7][0])
plt.plot(X,day_data[8][2],color='#FBC15E',linewidth='1.5', markersize=5,label=str(sta_names[int(day_data[8][0])])+day_data[8][0])
plt.plot(X,day_data[9][2],color='#e5ee38',linewidth='1.5', markersize=5,label=str(sta_names[int(day_data[9][0])])+day_data[9][0])
plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3,ncol=1, mode="expand", borderaxespad=0.)
plt.plot([0.2,0.1,0.0],[0.5,0.5,0.5])
if var_type=="wind":
plt.ylim([-5.0,70.0])
plt.ylabel('Average Wind Speed(mph)')
plt.title(mon+day +" Every 5min Average Wind Speed")
elif var_type=="windMax":
plt.ylim([-5.0,70.0])
plt.ylabel('Max Wind Speed(mph)')
plt.title(mon+day +"Every 5min Max Wind")
elif var_type=="windDir":
#plt.ylim([-5.0,70.0])
plt.ylabel('Max Wind Speed(mph)')
plt.title(mon+day +" Wind Direction Degree")
elif var_type=="temp":
plt.ylim([-10.0,100.0])
plt.ylabel('Temperature(F)')
plt.title(mon+day +" 2m Temperature")
elif var_type=="temp9":
plt.ylim([-10.0,100.0])
plt.ylabel('Temperature(F)')
plt.title(mon+day +" 9m Temperature")
elif var_type=="press":
#plt.ylim([-10.0,100.0])
plt.ylabel('Pressure(mbar)')
plt.title(mon+day +" Pressure")
elif var_type=="rad":
#plt.ylim([-10.0,100.0])
plt.ylabel('Solar Radiation(W/m^2)')
plt.title(mon+day +" Solar Radiation")
elif var_type=="rh":
plt.ylim([0.0,100.0])
plt.ylabel('Relative Humidity %')
plt.title(mon+day +" rh")
#plt.xticks(np.arange(min(X), max(X)+2, 12.0))
plt.xticks(np.arange(min(X), max(X)+2, 12.0),label)
#plt.yticks(np.arange(0, 100, 5.0),labelY)
plt.tick_params(axis='both', which='major', labelsize=7)
plt.xlabel('Time from 00:00 ~23:59,every 5min')
#plt.xlim([0.2,0.0])
plt.legend(loc='best',fontsize=10)
plt.grid()
#plt.tight_layout(pad=0.4, w_pad=0.5, h_pad=1.0)
#plt.show()
fig.savefig(root+'/outputs/mesonetPlots/'+var_type+'_plots/'+str(mon+day)+'.png')
plt.close()
def plotTravTimeAllDays():
import matplotlib
#dates=["20160301","20160302","20160308","20160309","20160312","20160313","20160324","20160325","20160328","20160405","20160412","20160419","20160421","20160514","20160529","20160621","20160628","20160813","20160911","20160922"]
dates=[]
mons=["201603","201604","201605","201606","201607","201608","201609"]
days=['01','02','03','04','05','06','07','08','09','10','11','12','13','14','15','16','17','18','19','20','21','22','23','24','25','26','27','28','29','30','31']
var_types=["TravelTimeToWest","TravelTimeToWest"]
#var_types=["wind"]
colors=[]
for name, hex in matplotlib.colors.cnames.iteritems():
if name not in Light_cnames.keys():
colors.append(hex)
for var_type in var_types:
rootpath="F:/workspace/git/Graph-MP/data/trafficData/I90_TravelTime/"+var_type+"/"
#expRoot="F:/workspace/git/Graph-MP/data/mesonet_data/mesonetExpData/statExpData/"
for mon in mons:
for day in days:
date=str(mon+day)
fileName=rootpath+mon+day+".txt"
print fileName
day_data=[]
if not os.path.exists(fileName):
print "File Not Found",fileName
continue
with open(fileName,"r") as df:
for idx,line in enumerate(df.readlines()):
terms=line.strip().split()
sta_name="TMC "+str(idx)
data=map(float,terms)
day_data.append((sta_name,mon+day,data))
X=[i for i in range(0,len(day_data[0][2]))]
label=[(str(i)+"\n"+str(i*5/60)+"h") for i in range(0,len(day_data[0][2])+1,12)]
labelY=[str(i) for i in range(0,100+1,5)]
print len(day_data)
fig=plt.figure(1)
for i in range(len(day_data)):
plt.plot(X,day_data[i][2],colors[i],linewidth='0.5', markersize=5,label=day_data[i][0])
#plt.plot([0.2,0.1,0.0],[0.5,0.5,0.5])
plt.ylabel('Traveling Time (sec)')
if var_type=="TravelTimeToWest":
plt.title(mon+day +" Travel Time I90 East To West")
else:
plt.title(mon+day +" Travel Time I90 West To East")
#plt.xticks(np.arange(min(X), max(X)+2, 12.0))
plt.xticks(np.arange(min(X), max(X)+2, 12.0),label)
#plt.yticks(np.arange(0, 100, 5.0),labelY)
plt.tick_params(axis='both', which='major', labelsize=7)
plt.xlabel('Time: 00:00 ~ 23:59,every 5min')
#plt.xlim([0.2,0.0])
plt.ylim([0.0,3600.0])
plt.grid()
#plt.tight_layout(pad=0.4, w_pad=0.5, h_pad=1.0)
#plt.show()
fig.savefig('F:/workspace/git/Graph-MP/outputs/trafficData/'+var_type+'_plots/'+str(mon+day)+'.png')
plt.close()
# Script entry point: render the per-day, per-variable mesonet plots.
plotAllDays()
|
# Enable CGI traceback reporting so uncaught errors render in the browser.
import cgitb, cgi; cgitb.enable()
from classes import Factory
# Parse the incoming CGI request (query string / form data).
fieldStorage = cgi.FieldStorage()
# Build the web application object for this request via the factory.
factory = Factory.Factory()
webApp = factory.makeWebApp(fieldStorage)
def outputHeaders():
    """Write the HTTP response headers for an HTML page to stdout."""
    print("Content-Type: text/html")
    print("")  # an empty line terminates the header section
# Emit HTTP headers first, then the rendered page body.
outputHeaders()
print webApp.getOutput()
|
# Read and display the entire contents of test.txt.
# Fix: the file handle was previously never closed; use a context manager
# so it is released even if read() raises.
with open('test.txt') as f:
    s = f.read()
print(s)
|
"""
Schema support plugin for PostgreSQL backends.
"""
__all__ = ['Behavior']
import os
from gnue.common.apps import errors
from gnue.common.datasources import GSchema
from gnue.common.datasources.drivers import DBSIG2
class Behavior (DBSIG2.Behavior):
    """
    Behavior class for PostgreSQL backends.

    Provides database creation and schema introspection (tables, views,
    fields, defaults, keys and constraints) on top of the generic DBSIG2
    behavior, by querying the PostgreSQL system catalogs directly.
    """

    # ---------------------------------------------------------------------------
    # Constructor
    # ---------------------------------------------------------------------------

    def __init__ (self, *args, **kwargs):
        DBSIG2.Behavior.__init__ (self, *args, **kwargs)

        # pg_class.relkind codes mapped to GSchema element descriptions.
        self.__RELKIND = {'v': {'type': 'view', 'name': u_("Views")},
                          'r': {'type': 'table', 'name': u_("Tables")}}
        # Build typemap: {nativetype: (group, fieldtype)}
        self.__TYPEMAP = {'date' : ('date', 'date'),
                          'bool' : ('boolean', 'boolean'),
                          'string': ('string', 'string')}
        for item in ['numeric', 'float4', 'float8', 'money', 'int8',
                     'int2', 'int4', 'serial']:
            self.__TYPEMAP [item] = ('number', 'number')
        for item in ['time', 'reltime']:
            self.__TYPEMAP [item] = ('date', 'time')
        for item in ['timestamp', 'abstime']:
            self.__TYPEMAP [item] = ('date', 'datetime')

        # Backend limits and SQL templates used by the generic DBSIG2 code.
        self._maxIdLength_ = 31
        self._alterMultiple_ = False
        self._numbers_ = [[(4, 'smallint'), (9, 'integer'), (18, 'bigint')],
                          "numeric (%s,0)", "numeric (%(length)s,%(scale)s)"]
        self._type2native_.update ({'boolean' : 'boolean',
                                    'datetime': 'timestamp without time zone'})

    # ---------------------------------------------------------------------------
    # Create a new database
    # ---------------------------------------------------------------------------

    def _createDatabase_ (self):
        """
        Create the requested user and database using the tools 'createuser',
        'createdb' and 'dropuser'. Of course this function should better make use
        of the template1 database using a connection object.
        """
        dbname = self.__connection.parameters.get ('dbname')
        username = self.__connection.parameters.get ('username', 'gnue')
        # NOTE(review): 'password' and 'site' are computed but never used in
        # this method — confirm whether the command-line tool path was removed.
        password = self.__connection.parameters.get ('password')
        host = self.__connection.parameters.get ('host')
        port = self.__connection.parameters.get ('port')
        owner = self.__connection.parameters.get ('owner', username)
        ownerpwd = self.__connection.parameters.get ('ownerpwd')

        site = ""
        if host is not None:
            site += " --host=%s" % host
        if port is not None:
            site += " --port=%s" % port

        # First, let's connect to template1 using the given username and password
        self.__connection.parameters ['dbname'] = 'template1'
        self.__connection.manager.loginToConnection (self.__connection)

        # Then have a look wether the requested owner is already available
        result = self.__connection.sql ('SELECT usesysid FROM pg_user ' \
            'WHERE usename = %(owner)s', {'owner': owner})
        if not result:
            cmd = 'CREATE USER %s' % owner
            if ownerpwd:
                cmd += " WITH PASSWORD '%s'" % ownerpwd
            self.__connection.sql0 (cmd)
            self.__connection.commit ()

        # Now go and create that new database
        cmd = "ABORT; CREATE DATABASE %s WITH OWNER %s ENCODING = 'UNICODE'; BEGIN"
        self.__connection.sql0 (cmd % (dbname, owner))
        self.__connection.commit ()
        self.__connection.close ()

        # Since the newly created database should be available now, connect to it
        # using the given owner
        self.__connection.parameters ['dbname'] = dbname
        self.__connection.parameters ['username'] = owner
        if ownerpwd:
            self.__connection.parameters ['password'] = ownerpwd
        else:
            if 'password' in self.__connection.parameters:
                del self.__connection.parameters ['password']
        self.__connection.manager.loginToConnection (self.__connection)

    # ---------------------------------------------------------------------------
    # Read the current connection's schema
    # ---------------------------------------------------------------------------

    def _readSchema_ (self, parent):
        """
        Read the connection's schema and build a GSchema object tree connected to
        the given parent object (which is of type GSSchema).
        """
        tables = self.__readTables (parent)
        fields = self.__readFields (tables)
        self.__readDefaults (fields)
        self.__readKeys (tables)
        self.__readConstraints (tables, fields)

    # ---------------------------------------------------------------------------
    # Read all table-like elements
    # ---------------------------------------------------------------------------

    def __readTables (self, parent):
        """
        Query pg_class for tables and views in the 'public' schema and create
        GSTable nodes for them. Returns a dict mapping OID -> GSTable.
        """
        mapping = {} # Maps OIDs to GSTable instances
        tables = None
        views = None

        cmd = u"SELECT c.oid, c.relname, c.relkind " \
              "FROM pg_class c, pg_namespace n " \
              "WHERE n.nspname = 'public' AND n.oid = c.relnamespace AND " \
              " c.relkind in (%s) " \
              "ORDER BY c.relname" \
              % ','.join (["%r" % kind for kind in self.__RELKIND.keys ()])

        cursor = self.__connection.makecursor (cmd)
        try:
            for (oid, relname, relkind) in cursor.fetchall ():
                kind = self.__RELKIND [relkind] ['type']
                properties = {'id': oid, 'name': relname, 'kind': kind}
                # Lazily create one GSTables container per relkind ('v'/'r').
                if relkind == 'v':
                    if views is None:
                        views = GSchema.GSTables (parent, **self.__RELKIND [relkind])
                    master = views
                else:
                    if tables is None:
                        tables = GSchema.GSTables (parent, **self.__RELKIND [relkind])
                    master = tables
                table = GSchema.GSTable (master, **properties)
                # Maintain a temporary mapping from OID's to GSTable instances so
                # adding fields afterwards runs faster
                mapping [oid] = table
        finally:
            cursor.close ()

        return mapping

    # ---------------------------------------------------------------------------
    # Find all fields
    # ---------------------------------------------------------------------------

    def __readFields (self, tables):
        """
        Query pg_attribute for the columns of the given tables and attach
        GSField nodes. Returns a dict mapping "<reloid>.<attnum>" -> GSField.
        """
        cmd = u"SELECT attrelid, attname, t.typname, attnotnull, " \
              " atthasdef, atttypmod, attnum, attlen " \
              "FROM pg_attribute a " \
              "LEFT OUTER JOIN pg_type t ON t.oid = a.atttypid " \
              "WHERE attnum >= 0 AND attisdropped = False " \
              "ORDER BY attrelid, attnum"
        cursor = self.__connection.makecursor (cmd)
        fields = None
        result = {}
        try:
            for rs in cursor.fetchall ():
                (relid, name, typename, notnull, hasdef, typemod, attnum, attlen) = rs
                # only process attributes from tables we've listed before
                if not relid in tables:
                    continue
                attrs = {'id' : "%s.%s" % (relid, attnum),
                         'name' : name,
                         'nativetype': typename,
                         'nullable' : hasdef or not notnull}
                # Unknown native types fall back to the generic 'string' group.
                if typename.lower () in self.__TYPEMAP:
                    (group, attrs ['type']) = self.__TYPEMAP [typename.lower ()]
                else:
                    (group, attrs ['type']) = self.__TYPEMAP ['string']
                if group == 'number':
                    if typemod != -1:
                        # atttypmod packs (precision << 16 | scale) + 4 for numerics.
                        value = typemod - 4
                        attrs ['length'] = value >> 16
                        attrs ['precision'] = value & 0xFFFF
                    elif attlen > 0:
                        # Fixed-size integer: digits needed for the max value
                        # (2L is a Python 2 long literal).
                        attrs ['length'] = len ("%s" % 2L ** (attlen * 8))
                elif typemod != -1:
                    # Non-numeric types with a typmod (e.g. varchar(n)).
                    attrs ['length'] = typemod - 4
                # Remove obsolete attributes
                if group in ['date', 'boolean']:
                    for item in ['length', 'precision']:
                        if item in attrs:
                            del attrs [item]
                elif group in ['string']:
                    if 'precision' in attrs:
                        del attrs ['precision']
                table = tables [relid]
                fields = table.findChildOfType ('GSFields')
                if fields is None:
                    fields = GSchema.GSFields (table)
                result [attrs ['id']] = GSchema.GSField (fields, **attrs)
        finally:
            cursor.close ()

        return result

    # ---------------------------------------------------------------------------
    # Read defaults and apply them to the given fields
    # ---------------------------------------------------------------------------

    def __readDefaults (self, fields):
        """
        Query pg_attrdef and classify each column default as 'serial'
        (sequence), 'timestamp' (now()) or a literal 'constant'.
        """
        cmd = u"SELECT adrelid, adnum, adsrc FROM pg_attrdef ORDER BY adrelid"
        cursor = self.__connection.makecursor (cmd)
        try:
            for (relid, fieldnum, source) in cursor.fetchall ():
                field = fields.get ("%s.%s" % (relid, fieldnum))
                # Skip all defaults of not listed fields
                if field is None:
                    continue
                if source [:8] == 'nextval(':
                    field.defaultwith = 'serial'
                elif source == 'now()':
                    field.defaultwith = 'timestamp'
                else:
                    field.defaultwith = 'constant'
                    # Strip the '::type' cast and surrounding quotes from the
                    # literal source text.
                    field.default = source.split ('::') [0].strip ("'")
        finally:
            cursor.close ()

    # ---------------------------------------------------------------------------
    # Read all indices and associate them with their table/view
    # ---------------------------------------------------------------------------

    def __readKeys (self, tables):
        """
        Query pg_index and attach primary keys and secondary indices to the
        given tables.
        """
        cmd = u"SELECT indrelid, indkey, indisunique, indisprimary, c.relname " \
              "FROM pg_index i LEFT OUTER JOIN pg_class c ON c.oid = indexrelid"
        cursor = self.__connection.makecursor (cmd)
        try:
            for (relid, fieldvec, isUnique, isPrimary, name) in cursor.fetchall ():
                # Skip functional indices. A functional index is an index that is
                # built upon a function manipulating a field, upper(userid) vs userid
                fields = [int (i) - 1 for i in fieldvec.split ()]
                if not fields:
                    continue
                # only process keys of listed tables
                table = tables.get (relid)
                if table is None:
                    continue
                if isPrimary:
                    index = GSchema.GSPrimaryKey (table, name = name)
                    fClass = GSchema.GSPKField
                else:
                    indices = table.findChildOfType ('GSIndexes')
                    if indices is None:
                        indices = GSchema.GSIndexes (table)
                    index = GSchema.GSIndex (indices, unique = isUnique, name = name)
                    fClass = GSchema.GSIndexField
                # indkey holds 1-based column positions; map them back to the
                # table's field names in order.
                fieldList = table.findChildrenOfType ('GSField', False, True)
                for find in fields:
                    fClass (index, name = fieldList [find].name)
        finally:
            cursor.close ()

    # ---------------------------------------------------------------------------
    # Read all constraints
    # ---------------------------------------------------------------------------

    def __readConstraints (self, tables, fields):
        """
        Query pg_constraint for foreign-key ('f') and unique ('u') constraints
        and attach them to the given tables.
        """
        cmd = u"SELECT conname, conrelid, confrelid, conkey, confkey, contype " \
              "FROM pg_constraint WHERE contype in ('f', 'u')"
        cursor = self.__connection.makecursor (cmd)
        try:
            for (name, relid, fkrel, key, fkey, ctype) in cursor.fetchall ():
                table = tables.get (relid)
                # NOTE(review): only the 'f' branch guards against table being
                # None; a unique constraint on an unlisted table would raise —
                # confirm whether that can happen here.
                if ctype == 'f':
                    fktable = tables.get (fkrel)
                    # We need both ends of a relation to be a valid constraint
                    if table is None or fktable is None:
                        continue
                    parent = table.findChildOfType ('GSConstraints')
                    if parent is None:
                        parent = GSchema.GSConstraints (table)
                    constr = GSchema.GSForeignKey (parent, name = name,
                        references = fktable.name)
                    # conkey/confkey may arrive as an array or as a string
                    # like '{1,2}'; normalize to a list of positions.
                    kp = isinstance (key, basestring) and key [1:-1].split (',') or key
                    fkp = isinstance (fkey, basestring) and fkey [1:-1].split(',') or fkey
                    k = [fields ["%s.%s" % (relid, i)].name for i in kp]
                    f = [fields ["%s.%s" % (fkrel, i)].name for i in fkp]
                    for (name, refname) in zip (k, f):
                        GSchema.GSFKField (constr, name = name, references = refname)
                # Unique-Constraint
                elif ctype == 'u':
                    parent = table.findChildOfType ('GSConstraints') or \
                             GSchema.GSConstraints (table)
                    constr = GSchema.GSUnique (parent, name = name)
                    kp = isinstance (key, basestring) and key [1:-1].split (',') or key
                    for name in [fields ["%s.%s" % (relid, i)].name for i in kp]:
                        GSchema.GSUQField (constr, name = name)
                    # Ok, since we know PostgreSQL automatically creates a unique index
                    # of the same name, we drop that index since it would only confuse a
                    # later diff
                    for ix in table.findChildrenOfType ('GSIndex', False, True):
                        if ix.name == constr.name:
                            parent = ix.getParent ()
                            parent._children.remove (ix)
                            ix.setParent (None)
        finally:
            cursor.close ()

    # ---------------------------------------------------------------------------
    # Handle special defaults
    # ---------------------------------------------------------------------------

    def _defaultwith_ (self, code, field):
        """
        Create a sequence for 'serials' and set the default for 'timestamps'.

        @param code: code-triple to get the result
        @param field: GSField instance of the field having the default
        """
        if field.defaultwith == 'serial':
            seq = self._getSequenceName (field)
            code [0].append (u"CREATE SEQUENCE %s" % seq)
            field.default = "DEFAULT nextval ('%s')" % seq
        elif field.defaultwith == 'timestamp':
            field.default = "DEFAULT now()"
|
from miasm.core.utils import size2mask
from miasm.expression.expression import ExprInt, ExprCond, ExprCompose, \
TOK_EQUAL
def simp_ext(_, expr):
    """Rewrite zeroExt_*/signExt_* operators into explicit compositions.

    Zero extension becomes a composition with a zero constant; sign
    extension composes the argument with its MSB replicated over the
    added bits. Other operators are returned unchanged.
    """
    op = expr.op
    if op.startswith('zeroExt_'):
        src = expr.args[0]
        # Extension to the same size is a no-op.
        if expr.size == src.size:
            return src
        padding = ExprInt(0, expr.size - src.size)
        return ExprCompose(src, padding)
    if op.startswith("signExt_"):
        src = expr.args[0]
        extra = expr.size - src.size
        # Fill the new high bits with all-ones or all-zeros depending on
        # the sign bit of the source.
        fill = ExprCond(
            src.msb(),
            ExprInt(size2mask(extra), extra),
            ExprInt(0, extra),
        )
        return ExprCompose(src, fill)
    return expr
def simp_flags(_, expr):
    """
    Rewrite symbolic flag operators (FLAG_*) and condition-code operators
    (CC_*) into their concrete bit-level expressions.

    Args:
        _: simplifier instance (unused).
        expr: expression to rewrite.

    Returns:
        The rewritten 1-bit expression, or *expr* unchanged when its
        operator is not a known flag/condition operator.
    """
    args = expr.args

    if expr.is_op("FLAG_EQ"):
        # Zero flag: 1 iff the argument is zero.
        return ExprCond(args[0], ExprInt(0, 1), ExprInt(1, 1))
    elif expr.is_op("FLAG_EQ_AND"):
        op1, op2 = args
        return ExprCond(op1 & op2, ExprInt(0, 1), ExprInt(1, 1))
    elif expr.is_op("FLAG_SIGN_SUB"):
        # Sign flag of a subtraction: MSB of the result.
        return (args[0] - args[1]).msb()
    elif expr.is_op("FLAG_EQ_CMP"):
        return ExprCond(
            args[0] - args[1],
            ExprInt(0, 1),
            ExprInt(1, 1),
        )
    elif expr.is_op("FLAG_ADD_CF"):
        # Carry-out of an addition, computed bitwise from operands and result.
        op1, op2 = args
        res = op1 + op2
        return (((op1 ^ op2) ^ res) ^ ((op1 ^ res) & (~(op1 ^ op2)))).msb()
    elif expr.is_op("FLAG_SUB_CF"):
        # Borrow-out of a subtraction.
        op1, op2 = args
        res = op1 - op2
        return (((op1 ^ op2) ^ res) ^ ((op1 ^ res) & (op1 ^ op2))).msb()
    elif expr.is_op("FLAG_ADD_OF"):
        # Signed overflow of an addition: operands agree in sign but the
        # result's sign differs.
        op1, op2 = args
        res = op1 + op2
        return (((op1 ^ res) & (~(op1 ^ op2)))).msb()
    elif expr.is_op("FLAG_SUB_OF"):
        op1, op2 = args
        res = op1 - op2
        return (((op1 ^ res) & (op1 ^ op2))).msb()
    elif expr.is_op("FLAG_EQ_ADDWC"):
        # *WC variants take a carry/borrow bit as third argument.
        op1, op2, op3 = args
        return ExprCond(
            op1 + op2 + op3.zeroExtend(op1.size),
            ExprInt(0, 1),
            ExprInt(1, 1),
        )
    elif expr.is_op("FLAG_ADDWC_OF"):
        op1, op2, op3 = args
        res = op1 + op2 + op3.zeroExtend(op1.size)
        return (((op1 ^ res) & (~(op1 ^ op2)))).msb()
    elif expr.is_op("FLAG_SUBWC_OF"):
        op1, op2, op3 = args
        res = op1 - (op2 + op3.zeroExtend(op1.size))
        return (((op1 ^ res) & (op1 ^ op2))).msb()
    elif expr.is_op("FLAG_ADDWC_CF"):
        op1, op2, op3 = args
        res = op1 + op2 + op3.zeroExtend(op1.size)
        return (((op1 ^ op2) ^ res) ^ ((op1 ^ res) & (~(op1 ^ op2)))).msb()
    elif expr.is_op("FLAG_SUBWC_CF"):
        op1, op2, op3 = args
        res = op1 - (op2 + op3.zeroExtend(op1.size))
        return (((op1 ^ op2) ^ res) ^ ((op1 ^ res) & (op1 ^ op2))).msb()
    elif expr.is_op("FLAG_SIGN_ADDWC"):
        op1, op2, op3 = args
        return (op1 + op2 + op3.zeroExtend(op1.size)).msb()
    elif expr.is_op("FLAG_SIGN_SUBWC"):
        op1, op2, op3 = args
        return (op1 - (op2 + op3.zeroExtend(op1.size))).msb()
    elif expr.is_op("FLAG_EQ_SUBWC"):
        op1, op2, op3 = args
        res = op1 - (op2 + op3.zeroExtend(op1.size))
        return ExprCond(res, ExprInt(0, 1), ExprInt(1, 1))
    # Condition codes expressed from the individual flag bits
    # (cf=carry, zf=zero, nf=negative, of=overflow).
    elif expr.is_op("CC_U<="):
        op_cf, op_zf = args
        return op_cf | op_zf
    elif expr.is_op("CC_U>="):
        op_cf, = args
        return ~op_cf
    elif expr.is_op("CC_S<"):
        op_nf, op_of = args
        return op_nf ^ op_of
    elif expr.is_op("CC_S>"):
        op_nf, op_of, op_zf = args
        return ~(op_zf | (op_nf ^ op_of))
    elif expr.is_op("CC_S<="):
        op_nf, op_of, op_zf = args
        return op_zf | (op_nf ^ op_of)
    elif expr.is_op("CC_S>="):
        op_nf, op_of = args
        return ~(op_nf ^ op_of)
    elif expr.is_op("CC_U>"):
        op_cf, op_zf = args
        return ~(op_cf | op_zf)
    elif expr.is_op("CC_U<"):
        op_cf, = args
        return op_cf
    elif expr.is_op("CC_NEG"):
        op_nf, = args
        return op_nf
    elif expr.is_op("CC_EQ"):
        op_zf, = args
        return op_zf
    elif expr.is_op("CC_NE"):
        op_zf, = args
        return ~op_zf
    elif expr.is_op("CC_POS"):
        op_nf, = args
        return ~op_nf
    return expr
|
from __future__ import absolute_import
import logging
import time
import six
from vdsm.storage import constants as sc
from vdsm.storage import exception
# Legacy on-disk metadata key: volume size in 512-byte blocks (replaced by
# CAPACITY, which is in bytes, on converted volumes).
_SIZE = "SIZE"

# Maps on-disk metadata keys to (attribute name, converter) pairs; parse()
# uses the converter both to validate and to coerce the raw string value.
ATTRIBUTES = {
    sc.DOMAIN: ("domain", str),
    sc.IMAGE: ("image", str),
    sc.PUUID: ("parent", str),
    sc.CAPACITY: ("capacity", int),
    sc.FORMAT: ("format", str),
    sc.TYPE: ("type", str),
    sc.VOLTYPE: ("voltype", str),
    sc.DISKTYPE: ("disktype", str),
    sc.DESCRIPTION: ("description", str),
    sc.LEGALITY: ("legality", str),
    sc.CTIME: ("ctime", int),
    sc.GENERATION: ("generation", int),
    sc.SEQUENCE: ("sequence", int),
}
def _lines_to_dict(lines):
md = {}
errors = []
for line in lines:
# Skip a line if there is invalid value.
try:
line = line.decode("utf-8")
except UnicodeDecodeError as e:
errors.append("Invalid line '{}': {}".format(line, e))
continue
if line.startswith("EOF"):
break
if '=' not in line:
continue
key, value = line.split('=', 1)
md[key.strip()] = value.strip()
return md, errors
def parse(lines):
    """Parse raw metadata lines into a validated metadata dict.

    Returns:
        tuple: (metadata dict keyed by attribute name, [error messages])
    """
    md, errors = _lines_to_dict(lines)

    if "NONE" in md:
        # Before 4.20.34-1 (ovirt 4.2.5) volume metadata could be
        # cleared by writing invalid metadata when deleting a volume.
        # See https://bugzilla.redhat.com/1574631.
        errors.append(str(exception.MetadataCleared()))
        return {}, errors

    # We work internally in bytes; old-format volumes store SIZE in
    # blocks, so derive CAPACITY from SIZE for non-converted volumes.
    if _SIZE in md and sc.CAPACITY not in md:
        try:
            md[sc.CAPACITY] = int(md[_SIZE]) * sc.BLOCK_SIZE_512
        except ValueError as e:
            errors.append(str(e))

    # Older volumes may lack these keys; fall back to the defaults.
    md.setdefault(sc.GENERATION, sc.DEFAULT_GENERATION)
    md.setdefault(sc.SEQUENCE, sc.DEFAULT_SEQUENCE)

    metadata = {}
    for key, (name, convert) in ATTRIBUTES.items():
        if key not in md:
            errors.append("Required key '{}' is missing.".format(name))
            continue
        try:
            metadata[name] = convert(md[key])
        except ValueError as e:
            errors.append("Invalid '{}' value: {}".format(name, str(e)))

    return metadata, errors
def dump(lines):
    """Return parsed metadata annotated with a validity status, for dumping."""
    md, errors = parse(lines)
    if errors:
        logging.warning(
            "Invalid metadata found errors=%s", errors)
    md["status"] = sc.VOL_STATUS_INVALID if errors else sc.VOL_STATUS_OK

    # Do not include domain in dump output.
    md.pop("domain", None)
    return md
class VolumeMetadata(object):
log = logging.getLogger('storage.volumemetadata')
    def __init__(self, domain, image, parent, capacity, format, type, voltype,
                 disktype, description="", legality=sc.ILLEGAL_VOL, ctime=None,
                 generation=sc.DEFAULT_GENERATION,
                 sequence=sc.DEFAULT_SEQUENCE):
        """
        Initialize volume metadata.

        Several attributes (description, capacity, ctime, generation,
        sequence) are properties with validating setters; assigning them
        here runs that validation.
        """
        # Storage domain UUID
        self.domain = domain
        # Image UUID
        self.image = image
        # UUID of the parent volume or BLANK_UUID
        self.parent = parent
        # Volume capacity in bytes
        self.capacity = capacity
        # Format (RAW or COW)
        self.format = format
        # Allocation policy (PREALLOCATED or SPARSE)
        self.type = type
        # Relationship to other volumes (LEAF, INTERNAL or SHARED)
        self.voltype = voltype
        # Intended usage of this volume (unused)
        self.disktype = disktype
        # Free-form description and may be used to store extra metadata
        self.description = description
        # Indicates if the volume contents should be considered valid
        self.legality = legality
        # Volume creation time (in seconds since the epoch)
        self.ctime = int(time.time()) if ctime is None else ctime
        # Generation increments each time certain operations complete
        self.generation = generation
        # Sequence number of the volume, increased every time a new volume is
        # created in an image.
        self.sequence = sequence
@classmethod
def from_lines(cls, lines):
'''
Instantiates a VolumeMetadata object from storage read bytes.
Args:
lines: list of key=value entries given as bytes read from storage
metadata section. "EOF" entry terminates parsing.
'''
metadata, errors = parse(lines)
if errors:
raise exception.InvalidMetadata(
"lines={} errors={}".format(lines, errors))
return cls(**metadata)
    @property
    def description(self):
        # Free-form description, always normalized by validate_description().
        return self._description

    @description.setter
    def description(self, desc):
        # Coerces to str and truncates overlong values instead of failing.
        self._description = self.validate_description(desc)
    @property
    def capacity(self):
        # Volume capacity in bytes; setter enforces an integer value.
        return self._capacity

    @capacity.setter
    def capacity(self, value):
        self._capacity = self._validate_integer("capacity", value)
    @property
    def ctime(self):
        # Creation time in seconds since the epoch; setter enforces an integer.
        return self._ctime

    @ctime.setter
    def ctime(self, value):
        self._ctime = self._validate_integer("ctime", value)
    @property
    def generation(self):
        # Metadata generation counter; setter enforces an integer value.
        return self._generation

    @generation.setter
    def generation(self, value):
        self._generation = self._validate_integer("generation", value)
    @property
    def sequence(self):
        # Volume sequence number within its image; setter enforces an integer.
        return self._sequence

    @sequence.setter
    def sequence(self, value):
        self._sequence = self._validate_integer("sequence", value)
@classmethod
def _validate_integer(cls, property, value):
if not isinstance(value, six.integer_types):
raise AssertionError(
"Invalid value for metadata property {!r}: {!r}".format(
property, value))
return value
@classmethod
def validate_description(cls, desc):
desc = str(desc)
# We cannot fail when the description is too long, since we must
# support older engine that may send such values, or old disks
# with long description.
if len(desc) > sc.DESCRIPTION_SIZE:
cls.log.warning("Description is too long, truncating to %d bytes",
sc.DESCRIPTION_SIZE)
desc = desc[:sc.DESCRIPTION_SIZE]
return desc
def storage_format(self, domain_version, **overrides):
"""
Format metadata parameters into storage format bytes.
VolumeMetadata is quite restrictive and does not allow
you to make an invalid metadata, but sometimes, for example
for a format conversion, you need some additional fields to
be written to the storage. Those fields can be added using
overrides dict.
Raises MetadataOverflowError if formatted metadata is too long.
"""
info = {
sc.CTIME: str(self.ctime),
sc.DESCRIPTION: self.description,
sc.DISKTYPE: self.disktype,
sc.DOMAIN: self.domain,
sc.FORMAT: self.format,
sc.GENERATION: self.generation,
sc.IMAGE: self.image,
sc.LEGALITY: self.legality,
sc.PUUID: self.parent,
sc.TYPE: self.type,
sc.VOLTYPE: self.voltype,
}
if domain_version < 5:
# Always zero on pre v5 domains
# We need to keep MTIME available on pre v5
# domains, as other code is expecting that
# field to exists and will fail without it.
info[sc.MTIME] = 0
# Pre v5 domains should have SIZE in blocks
# instead of CAPACITY in bytes
info[_SIZE] = self.capacity // sc.BLOCK_SIZE_512
else:
info[sc.CAPACITY] = self.capacity
info[sc.SEQUENCE] = self.sequence
info.update(overrides)
keys = sorted(info.keys())
lines = ["%s=%s\n" % (key, info[key]) for key in keys]
lines.append("EOF\n")
data = "".join(lines).encode("utf-8")
if len(data) > sc.METADATA_SIZE:
raise exception.MetadataOverflowError(data)
return data
# Three defs below allow us to imitate a dictionary
# So intstead of providing a method to return a dictionary
# with values, we return self and mimick dict behaviour.
# In the fieldmap we keep mapping between metadata
# field name and our internal field names
#
# TODO: All dict specific code below should be removed, when rest of VDSM
# will be refactored, to use VolumeMetadata properties, instead of dict
_fieldmap = {
sc.FORMAT: 'format',
sc.TYPE: 'type',
sc.VOLTYPE: 'voltype',
sc.DISKTYPE: 'disktype',
sc.CAPACITY: 'capacity',
sc.CTIME: 'ctime',
sc.DOMAIN: 'domain',
sc.IMAGE: 'image',
sc.DESCRIPTION: 'description',
sc.PUUID: 'parent',
sc.LEGALITY: 'legality',
sc.GENERATION: 'generation',
sc.SEQUENCE: "sequence",
}
def __getitem__(self, item):
try:
value = getattr(self, self._fieldmap[item])
except AttributeError:
raise KeyError(item)
# Some fields needs to be converted to string
if item in (sc.CAPACITY, sc.CTIME):
value = str(value)
return value
    def __setitem__(self, item, value):
        # Dict-style assignment; routed through the matching property
        # setter, so the usual validation still applies.
        setattr(self, self._fieldmap[item], value)

    def get(self, item, default=None):
        # Dict-style get() returning default for unknown or unset fields.
        try:
            return self[item]
        except KeyError:
            return default
def dump(self):
return {
"capacity": self.capacity,
"ctime": self.ctime,
"description": self.description,
"disktype": self.disktype,
"format": self.format,
"generation": self.generation,
"sequence": self.sequence,
"image": self.image,
"legality": self.legality,
"parent": self.parent,
"type": self.type,
"voltype": self.voltype,
}
|
class Infix(object):
    """
    Wrap a two-argument function so it can be used as a pseudo infix
    operator: ``a |op| b`` or ``a <<op>> b``, or called directly as
    ``op(a, b)``.
    """

    def __init__(self, function):
        self.function = function

    def __ror__(self, left):
        # ``left | op`` -- capture the left operand and return a new
        # Infix waiting for the right one.
        return Infix(lambda right: self.function(left, right))

    def __or__(self, right):
        # ``(left | op) | right`` -- apply the partially-applied function.
        return self.function(right)

    def __rlshift__(self, left):
        # Same trick for the ``<<``/``>>`` spelling.
        return Infix(lambda right: self.function(left, right))

    def __rshift__(self, right):
        return self.function(right)

    def __call__(self, value1, value2):
        # Plain function-call form.
        return self.function(value1, value2)
|
"""
Represents a group of conduits
Copyright: John Stowers, 2007
License: GPLv2
"""
import traceback
import os
import xml.dom.minidom
import gobject
import logging
log = logging.getLogger("SyncSet")
import conduit
import conduit.Conduit as Conduit
import conduit.Settings as Settings
import conduit.XMLSerialization as XMLSerialization
SETTINGS_VERSION = XMLSerialization.Settings.XML_VERSION
class SyncSet(gobject.GObject):
"""
Represents a group of conduits
"""
__gsignals__ = {
#Fired when a new instantiatable DP becomes available. It is described via
#a wrapper because we do not actually instantiate it till later - to save memory
"conduit-added" : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE, [
gobject.TYPE_PYOBJECT]), # The ConduitModel that was added
"conduit-removed" : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE, [
gobject.TYPE_PYOBJECT]), # The ConduitModel that was removed
}
def __init__(self, moduleManager, syncManager, xmlSettingFilePath="settings.xml"):
gobject.GObject.__init__(self)
self.moduleManager = moduleManager
self.syncManager = syncManager
self.xmlSettingFilePath = xmlSettingFilePath
self.conduits = []
self.moduleManager.connect("dataprovider-available", self.on_dataprovider_available_unavailable)
self.moduleManager.connect("dataprovider-unavailable", self.on_dataprovider_available_unavailable)
# FIXME: temporary hack - need to let factories know about this factory :-\!
self.moduleManager.emit("syncset-added", self)
def _restore_dataprovider(self, cond, wrapperKey, dpName="", dpxml="", trySourceFirst=True):
"""
Adds the dataprovider back onto the canvas at the specifed
location and configures it with the given settings
"""
log.debug("Restoring %s to (source=%s)" % (wrapperKey,trySourceFirst))
wrapper = self.moduleManager.get_module_wrapper_with_instance(wrapperKey)
if dpName:
wrapper.set_name(dpName)
if wrapper is not None:
if dpxml:
for i in dpxml.childNodes:
if i.nodeType == i.ELEMENT_NODE and i.localName == "configuration":
wrapper.set_configuration_xml(xmltext=i.toxml())
cond.add_dataprovider(wrapper, trySourceFirst)
def on_dataprovider_available_unavailable(self, loader, dpw):
"""
Removes all PendingWrappers corresponding to dpw and replaces with new dpw instances
"""
key = dpw.get_key()
for c in self.get_all_conduits():
for dp in c.get_dataproviders_by_key(key):
new = self.moduleManager.get_module_wrapper_with_instance(key)
#retain configuration information
new.set_configuration_xml(dp.get_configuration_xml())
new.set_name(dp.get_name())
c.change_dataprovider(
oldDpw=dp,
newDpw=new
)
def emit(self, *args):
"""
Override the gobject signal emission so that all signals are emitted
from the main loop on an idle handler
"""
gobject.idle_add(gobject.GObject.emit,self,*args)
def create_preconfigured_conduit(self, sourceKey, sinkKey, twoway):
cond = Conduit.Conduit(self.syncManager)
self.add_conduit(cond)
if twoway == True:
cond.enable_two_way_sync()
self._restore_dataprovider(cond, sourceKey, trySourceFirst=True)
self._restore_dataprovider(cond, sinkKey, trySourceFirst=False)
def add_conduit(self, cond):
self.conduits.append(cond)
self.emit("conduit-added", cond)
def remove_conduit(self, cond):
self.emit("conduit-removed", cond)
cond.quit()
self.conduits.remove(cond)
def get_all_conduits(self):
return self.conduits
def get_conduit(self, index):
return self.conduits[index]
def index (self, conduit):
return self.conduits.index(conduit)
def num_conduits(self):
return len(self.conduits)
def clear(self):
for c in self.conduits[:]:
self.remove_conduit(c)
def save_to_xml(self, xmlSettingFilePath=None):
"""
Saves the synchronisation settings (icluding all dataproviders and how
they are connected) to an xml file so that the 'sync set' can
be restored later
"""
if xmlSettingFilePath == None:
xmlSettingFilePath = self.xmlSettingFilePath
log.info("Saving Sync Set to %s" % self.xmlSettingFilePath)
#Build the application settings xml document
doc = xml.dom.minidom.Document()
rootxml = doc.createElement("conduit-application")
rootxml.setAttribute("application-version", conduit.VERSION)
rootxml.setAttribute("settings-version", SETTINGS_VERSION)
doc.appendChild(rootxml)
#Store the conduits
for cond in self.conduits:
conduitxml = doc.createElement("conduit")
conduitxml.setAttribute("uid",cond.uid)
conduitxml.setAttribute("twoway",str(cond.is_two_way()))
conduitxml.setAttribute("autosync",str(cond.do_auto_sync()))
for policyName in Conduit.CONFLICT_POLICY_NAMES:
conduitxml.setAttribute(
"%s_policy" % policyName,
cond.get_policy(policyName)
)
rootxml.appendChild(conduitxml)
#Store the source
source = cond.datasource
if source is not None:
sourcexml = doc.createElement("datasource")
sourcexml.setAttribute("key", source.get_key())
sourcexml.setAttribute("name", source.get_name())
conduitxml.appendChild(sourcexml)
#Store source settings
configxml = xml.dom.minidom.parseString(source.get_configuration_xml())
sourcexml.appendChild(configxml.documentElement)
#Store all sinks
sinksxml = doc.createElement("datasinks")
for sink in cond.datasinks:
sinkxml = doc.createElement("datasink")
sinkxml.setAttribute("key", sink.get_key())
sinkxml.setAttribute("name", sink.get_name())
sinksxml.appendChild(sinkxml)
#Store sink settings
configxml = xml.dom.minidom.parseString(sink.get_configuration_xml())
sinkxml.appendChild(configxml.documentElement)
conduitxml.appendChild(sinksxml)
#Save to disk
try:
file_object = open(xmlSettingFilePath, "w")
file_object.write(doc.toxml())
#file_object.write(doc.toprettyxml())
file_object.close()
except IOError, err:
log.warn("Could not save settings to %s (Error: %s)" % (xmlSettingFilePath, err.strerror))
def restore_from_xml(self, xmlSettingFilePath=None):
"""
Restores sync settings from the xml file
"""
if xmlSettingFilePath == None:
xmlSettingFilePath = self.xmlSettingFilePath
log.info("Restoring Sync Set from %s" % xmlSettingFilePath)
#Check the file exists
if not os.path.isfile(xmlSettingFilePath):
log.info("%s not present" % xmlSettingFilePath)
return
try:
#Open
doc = xml.dom.minidom.parse(xmlSettingFilePath)
#check the xml file is in a version we can read.
if doc.documentElement.hasAttribute("settings-version"):
xml_version = doc.documentElement.getAttribute("settings-version")
try:
xml_version = int(xml_version)
except ValueError, TypeError:
log.error("%s xml file version is not valid" % xmlSettingFilePath)
os.remove(xmlSettingFilePath)
return
if int(SETTINGS_VERSION) < xml_version:
log.warning("%s xml file is incorrect version" % xmlSettingFilePath)
os.remove(xmlSettingFilePath)
return
else:
log.info("%s xml file version not found, assuming too old, removing" % xmlSettingFilePath)
os.remove(xmlSettingFilePath)
return
#Parse...
for conds in doc.getElementsByTagName("conduit"):
#create a new conduit
cond = Conduit.Conduit(self.syncManager, conds.getAttribute("uid"))
self.add_conduit(cond)
#restore conduit specific settings
twoway = Settings.string_to_bool(conds.getAttribute("twoway"))
if twoway == True:
cond.enable_two_way_sync()
auto = Settings.string_to_bool(conds.getAttribute("autosync"))
if auto == True:
cond.enable_auto_sync()
for policyName in Conduit.CONFLICT_POLICY_NAMES:
cond.set_policy(
policyName,
conds.getAttribute("%s_policy" % policyName)
)
#each dataprovider
for i in conds.childNodes:
#keep a ref to the dataproider was added to so that we
#can apply settings to it at the end
#one datasource
if i.nodeType == i.ELEMENT_NODE and i.localName == "datasource":
key = i.getAttribute("key")
name = i.getAttribute("name")
#add to canvas
if len(key) > 0:
self._restore_dataprovider(cond, key, name, i, True)
#many datasinks
elif i.nodeType == i.ELEMENT_NODE and i.localName == "datasinks":
#each datasink
for sink in i.childNodes:
if sink.nodeType == sink.ELEMENT_NODE and sink.localName == "datasink":
key = sink.getAttribute("key")
name = sink.getAttribute("name")
#add to canvas
if len(key) > 0:
self._restore_dataprovider(cond, key, name, sink, False)
except:
log.warn("Error parsing %s. Exception:\n%s" % (xmlSettingFilePath, traceback.format_exc()))
os.remove(xmlSettingFilePath)
def quit(self):
"""
Calls unitialize on all dataproviders
"""
for c in self.conduits:
c.quit()
|
from __future__ import absolute_import
from __future__ import division
import six
from vdsm.common import exception
from vdsm.common import xmlutils
from vdsm.virt.vmdevices import network, hwclass
from testlib import VdsmTestCase as TestCaseBase, XMLTestCase
from testlib import permutations, expandPermutations
from monkeypatch import MonkeyClass, MonkeyPatchScope
from testValidation import skipif
from vdsm.common import hooks
from vdsm.common import hostdev
from vdsm.common import libvirtconnection
import hostdevlib
@expandPermutations
@MonkeyClass(libvirtconnection, 'get', hostdevlib.Connection)
@MonkeyClass(hostdev, '_sriov_totalvfs', hostdevlib.fake_totalvfs)
@MonkeyClass(hostdev, '_pci_header_type', lambda _: 0)
@MonkeyClass(hooks, 'after_hostdev_list_by_caps', lambda json: json)
@MonkeyClass(hostdev, '_get_udev_block_mapping',
             lambda: hostdevlib.UDEV_BLOCK_MAP)
class HostdevTests(TestCaseBase):
    """
    Host device discovery tests running entirely against the fake
    libvirt connection and fixtures from hostdevlib (no real hardware).
    """

    def testProcessDeviceParams(self):
        # Plain (non-SR-IOV) device parsing.
        deviceXML = hostdev._process_device_params(
            libvirtconnection.get().nodeDeviceLookupByName(
                hostdevlib.ADDITIONAL_DEVICE).XMLDesc()
        )
        self.assertEqual(
            hostdevlib.ADDITIONAL_DEVICE_PROCESSED,
            deviceXML
        )

    @skipif(six.PY3, "Not relevant in Python 3 libvirt")
    # libvirt in Python 3 returns strings, so we don't deal with
    # invalid coding anymore.
    def testProcessDeviceParamsInvalidEncoding(self):
        deviceXML = hostdev._process_device_params(
            libvirtconnection.get().nodeDeviceLookupByName(
                hostdevlib.COMPUTER_DEVICE).XMLDesc()
        )
        self.assertEqual(
            hostdevlib.COMPUTER_DEVICE_PROCESSED,
            deviceXML
        )

    def testProcessSRIOV_PFDeviceParams(self):
        # SR-IOV physical function parsing.
        deviceXML = hostdev._process_device_params(
            libvirtconnection.get().nodeDeviceLookupByName(
                hostdevlib.SRIOV_PF).XMLDesc()
        )
        self.assertEqual(
            hostdevlib.SRIOV_PF_PROCESSED,
            deviceXML
        )

    def testProcessSRIOV_VFDeviceParams(self):
        # SR-IOV virtual function parsing.
        deviceXML = hostdev._process_device_params(
            libvirtconnection.get().nodeDeviceLookupByName(
                hostdevlib.SRIOV_VF).XMLDesc()
        )
        self.assertEqual(hostdevlib.SRIOV_VF_PROCESSED, deviceXML)

    def testProcessNetDeviceParams(self):
        deviceXML = hostdev._process_device_params(
            libvirtconnection.get().nodeDeviceLookupByName(
                hostdevlib.NET_DEVICE).XMLDesc()
        )
        self.assertEqual(hostdevlib.NET_DEVICE_PROCESSED, deviceXML)

    def testProcessMdevDeviceParams(self):
        deviceXML = hostdev._process_device_params(
            libvirtconnection.get().nodeDeviceLookupByName(
                hostdevlib.MDEV_DEVICE).XMLDesc()
        )
        self.assertEqual(hostdevlib.MDEV_DEVICE_PROCESSED, deviceXML)

    def testGetDevicesFromLibvirt(self):
        # The processed map must cover exactly the PCI + USB + SCSI fixtures.
        libvirt_devices, _ = hostdev._get_devices_from_libvirt()
        self.assertEqual(hostdevlib.DEVICES_PROCESSED, libvirt_devices)
        self.assertEqual(len(libvirt_devices),
                         len(hostdevlib.PCI_DEVICES) +
                         len(hostdevlib.USB_DEVICES) +
                         len(hostdevlib.SCSI_DEVICES))

    @permutations([[''], [('pci',)], [('usb_device',)],
                   [('pci', 'usb_device')]])
    def testListByCaps(self, caps):
        # Every device known for a capability must appear in the listing.
        devices = hostdev.list_by_caps(caps)
        for cap in caps:
            self.assertTrue(set(hostdevlib.DEVICES_BY_CAPS[cap].keys()).
                            issubset(set(devices.keys())))

    @permutations([
        # addr_type, addr, name
        ('usb', {'bus': '1', 'device': '2'}, 'usb_1_1'),
        ('usb', {'bus': '1', 'device': '10'}, 'usb_1_1_4'),
        ('pci', {'slot': '26', 'bus': '0', 'domain': '0', 'function': '0'},
         'pci_0000_00_1a_0'),
        ('scsi', {'bus': '0', 'host': '1', 'lun': '0', 'target': '0'},
         'scsi_1_0_0_0'),
    ])
    def test_device_name_from_address(self, addr_type, addr, name):
        # we need to make sure we scan all the devices (hence caps=None)
        hostdev.list_by_caps()
        self.assertEqual(
            hostdev.device_name_from_address(addr_type, addr),
            name
        )
@MonkeyClass(libvirtconnection, 'get', hostdevlib.Connection.get)
@MonkeyClass(hostdev, '_sriov_totalvfs', hostdevlib.fake_totalvfs)
@MonkeyClass(hostdev, '_pci_header_type', lambda _: 0)
@MonkeyClass(hooks, 'after_hostdev_list_by_caps', lambda json: json)
class HostdevPerformanceTests(TestCaseBase):
    """
    Sanity check that device listing scales to a large (3k device) fake
    host tree without dropping any devices.
    """

    def test_3k_storage_devices(self):
        with hostdevlib.Connection.use_hostdev_tree():
            # Every device the connection reports must survive processing.
            self.assertEqual(
                len(hostdev.list_by_caps()),
                len(libvirtconnection.get().listAllDevices())
            )
@expandPermutations
@MonkeyClass(libvirtconnection, 'get', hostdevlib.Connection)
@MonkeyClass(hostdev, '_sriov_totalvfs', hostdevlib.fake_totalvfs)
@MonkeyClass(hostdev, '_pci_header_type', lambda _: 0)
class HostdevCreationTests(XMLTestCase):
    """
    Domain XML generation tests for passed-through host devices.
    """

    # Expected guest PCI address and its XML rendering; the two must
    # stay in sync.
    _PCI_ADDRESS = {'slot': '0x02', 'bus': '0x01', 'domain': '0x0000',
                    'function': '0x0', 'type': 'pci'}

    _PCI_ADDRESS_XML = '<address bus="0x01" domain="0x0000" function="0x0" \
slot="0x02" type="pci"/>'

    def setUp(self):
        self.conf = {
            'vmName': 'testVm',
            'vmId': '9ffe28b6-6134-4b1e-8804-1185f49c436f',
            'smp': '8', 'maxVCpus': '160',
            'memSize': '1024', 'memGuaranteedSize': '512'}

    # TODO: next 2 tests should reside in their own module (interfaceTests.py)
    def testCreateSRIOVVF(self):
        # No explicit guest address: the address placeholder stays empty.
        dev_spec = {'type': hwclass.NIC, 'device': 'hostdev',
                    'hostdev': hostdevlib.SRIOV_VF,
                    'macAddr': 'ff:ff:ff:ff:ff:ff',
                    'specParams': {'vlanid': 3},
                    'bootOrder': '9'}
        device = network.Interface(self.log, **dev_spec)
        self.assertXMLEqual(
            xmlutils.tostring(device.getXML()),
            hostdevlib.DEVICE_XML[hostdevlib.SRIOV_VF] % ('',))

    def testCreateSRIOVVFWithAddress(self):
        # NOTE(review): the inline address dict duplicates _PCI_ADDRESS --
        # presumably kept literal on purpose; confirm before deduplicating.
        dev_spec = {'type': hwclass.NIC, 'device': 'hostdev',
                    'hostdev': hostdevlib.SRIOV_VF,
                    'macAddr': 'ff:ff:ff:ff:ff:ff',
                    'specParams': {'vlanid': 3},
                    'bootOrder': '9', 'address':
                    {'slot': '0x02', 'bus': '0x01', 'domain': '0x0000',
                     'function': '0x0', 'type': 'pci'}}
        device = network.Interface(self.log, **dev_spec)
        self.assertXMLEqual(
            xmlutils.tostring(device.getXML()),
            hostdevlib.DEVICE_XML[hostdevlib.SRIOV_VF] % (
                self._PCI_ADDRESS_XML
            )
        )
@expandPermutations
@MonkeyClass(hostdev, '_each_supported_mdev_type', hostdevlib.fake_mdev_types)
@MonkeyClass(hostdev, '_mdev_type_details', hostdevlib.fake_mdev_details)
@MonkeyClass(hostdev, '_mdev_device_vendor', hostdevlib.fake_mdev_vendor)
@MonkeyClass(hostdev, '_mdev_type_devices', hostdevlib.fake_mdev_instances)
@MonkeyClass(hostdev, 'supervdsm', hostdevlib.FakeSuperVdsm())
class TestMdev(TestCaseBase):
    """
    Mediated device (vGPU) placement tests against two fake cards, each
    advertising the '8q' (1 instance) and '4q' (2 instances) mdev types.
    """

    def setUp(self):
        def make_device(name):
            # Each fake card offers two usable types plus two
            # incompatible ones that must never be chosen.
            mdev_types = [
                hostdevlib.FakeMdevType('incompatible-1', 2),
                hostdevlib.FakeMdevType('8q', 1),
                hostdevlib.FakeMdevType('4q', 2),
                hostdevlib.FakeMdevType('incompatible-2', 2),
            ]
            return hostdevlib.FakeMdevDevice(name=name, vendor='0x10de',
                                             mdev_types=mdev_types)

        self.devices = [make_device(name) for name in ('card-1', 'card-2',)]

    @permutations([
        # (mdev_type, mdev_uuid)*, mdev_placement, instances
        [[('4q', '4q-1')],
         hostdev.MdevPlacement.COMPACT, [['4q-1'], []]],
        [[('8q', '8q-1')],
         hostdev.MdevPlacement.SEPARATE, [['8q-1'], []]],
        [[('4q', '4q-1'), ('4q', '4q-2')],
         hostdev.MdevPlacement.COMPACT, [['4q-1', '4q-2'], []]],
        [[('4q', '4q-1'), ('8q', '8q-1')],
         hostdev.MdevPlacement.COMPACT, [['4q-1'], ['8q-1']]],
        [[('4q', '4q-1'), ('4q', '4q-2')],
         hostdev.MdevPlacement.SEPARATE, [['4q-1'], ['4q-2']]],
        [[('4q', '4q-1'), ('8q', '8q-1'), ('4q', '4q-2')],
         hostdev.MdevPlacement.COMPACT, [['4q-1', '4q-2'], ['8q-1']]],
        [[('8q', '8q-1'), ('4q', '4q-1'), ('4q', '4q-2')],
         hostdev.MdevPlacement.COMPACT, [['8q-1'], ['4q-1', '4q-2']]],
        [[('4q', '4q-1'), ('4q', '4q-2'), ('8q', '8q-1')],
         hostdev.MdevPlacement.COMPACT, [['4q-1', '4q-2'], ['8q-1']]],
        [[('4q', '4q-1'), ('8q', '8q-1'), ('4q', '4q-2')],
         hostdev.MdevPlacement.SEPARATE, [['4q-1', '4q-2'], ['8q-1']]],
    ])
    def test_vgpu_placement(self, mdev_specs, mdev_placement, instances):
        with MonkeyPatchScope([
            (hostdev, '_each_mdev_device', lambda: self.devices)
        ]):
            for mdev_type, mdev_uuid in mdev_specs:
                hostdev.spawn_mdev(mdev_type, mdev_uuid, mdev_placement,
                                   self.log)
        # Verify which instances ended up on which card.
        for inst, dev in zip(instances, self.devices):
            dev_inst = []
            for mdev_type in dev.mdev_types:
                dev_inst.extend(mdev_type.instances)
            self.assertEqual(inst, dev_inst)

    @permutations([
        [hostdev.MdevPlacement.COMPACT],
        [hostdev.MdevPlacement.SEPARATE],
    ])
    def test_unsupported_vgpu_placement(self, placement):
        # An mdev type no card supports must fail regardless of placement.
        with MonkeyPatchScope([
            (hostdev, '_each_mdev_device', lambda: self.devices)
        ]):
            self.assertRaises(
                exception.ResourceUnavailable,
                hostdev.spawn_mdev, 'unsupported', '1234', placement, self.log
            )
|
from random import choice
from feedparser import parse
from errbot import botcmd, BotPlugin
class DevOpsBorat(BotPlugin):
    """
    Quotes from various dev humour related twitter accounts
    """

    # All three commands share the same fetch-and-pick logic; only the
    # twitter screen name differs, so it lives in one helper.
    _TIMELINE_URL = ('http://api.twitter.com/1/statuses/user_timeline.rss'
                     '?screen_name=')

    def _random_quote(self, screen_name):
        """Fetch screen_name's RSS timeline and return one random entry's
        description."""
        myfeed = parse(self._TIMELINE_URL + screen_name)
        return choice(myfeed['entries']).description

    @botcmd
    def borat(self, mess, args):
        """
        Random quotes from the DEVOPS_BORAT twitter account
        """
        return self._random_quote('DEVOPS_BORAT')

    @botcmd
    def jesus(self, mess, args):
        """
        Random quotes from the devops_jesus twitter account
        """
        return self._random_quote('devops_jesus')

    @botcmd
    def yoda(self, mess, args):
        """
        Random quotes from the UXYoda twitter account
        """
        return self._random_quote('UXYoda')
|
import logging
import readline
import shlex
from getpass import getpass
from ConfigParser import NoOptionError
from spacecmd.utils import *
from time import sleep
import xmlrpclib
HELP_SYSTEM_OPTS = '''<SYSTEMS> can be any of the following:
name
ssm (see 'help ssm')
search:QUERY (see 'help system_search')
group:GROUP
channel:CHANNEL
'''

HELP_TIME_OPTS = '''Dates can be any of the following:
Explicit Dates:
Dates can be expressed as explicit date strings in the YYYYMMDD[HHMM]
format. The year, month and day are required, while the hours and
minutes are not; the hours and minutes will default to 0000 if no
values are provided.
Deltas:
Dates can be expressed as delta values. For example, '2h' would
mean 2 hours in the future. You can also use negative values to
express times in the past (e.g., -7d would be one week ago).
Units:
s -> seconds
m -> minutes
h -> hours
d -> days
'''

# Cache lifetimes, in seconds.
SYSTEM_CACHE_TTL = 3600
PACKAGE_CACHE_TTL = 86400
ERRATA_CACHE_TTL = 86400

# Oldest Spacewalk API version this client supports.
MINIMUM_API_VERSION = 10.8

# Visual separator printed between multi-record listings.
SEPARATOR = '\n' + '#' * 30 + '\n'

# Entitlement labels a system may hold.
ENTITLEMENTS = ['enterprise_entitled',
                'virtualization_host'
                ]

# Fields accepted by the 'search:' system selector.
SYSTEM_SEARCH_FIELDS = ['id', 'name', 'ip', 'hostname',
                        'device', 'vendor', 'driver', 'uuid']
def help_systems(self):
    # Print the shared help text describing <SYSTEMS> arguments.
    print HELP_SYSTEM_OPTS

def help_time(self):
    # Print the shared help text describing date/delta arguments.
    print HELP_TIME_OPTS

def help_clear(self):
    print 'clear: clear the screen'
    print 'usage: clear'

def do_clear(self, args):
    # Delegates to the terminal's own clear command.
    os.system('clear')

def help_clear_caches(self):
    print 'clear_caches: Clear the internal caches kept for systems' + \
        ' and packages'
    print 'usage: clear_caches'

def do_clear_caches(self, args):
    # Drop all three in-memory caches (and their on-disk copies).
    self.clear_system_cache()
    self.clear_package_cache()
    self.clear_errata_cache()
def help_get_apiversion(self):
    print 'get_apiversion: Display the API version of the server'
    print 'usage: get_apiversion'

def do_get_apiversion(self, args):
    print self.client.api.getVersion()

def help_get_serverversion(self):
    print 'get_serverversion: Display the version of the server'
    print 'usage: get_serverversion'

def do_get_serverversion(self, args):
    print self.client.api.systemVersion()

def help_get_certificateexpiration(self):
    print 'get_certificateexpiration: Print the expiration date of the'
    print " server's entitlement certificate"
    print 'usage: get_certificateexpiration'

def do_get_certificateexpiration(self, args):
    # Requires an authenticated session.
    date = self.client.satellite.getCertificateExpirationDate(self.session)
    print date
def help_list_proxies(self):
print 'list_proxies: List the proxies wihtin the user\'s organization '
print 'usage: list_proxies'
def do_list_proxies(self, args):
    # Print the raw proxy list returned by the server.
    proxies = self.client.satellite.listProxies(self.session)
    print proxies

def help_get_session(self):
    print 'get_session: Show the current session string'
    print 'usage: get_session'

def do_get_session(self, args):
    if self.session:
        print self.session
    else:
        logging.error('No session found')
def help_help(self):
    print 'help: Show help for the given command'
    print 'usage: help COMMAND'

def help_history(self):
    print 'history: List your command history'
    print 'usage: history'

def do_history(self, args):
    # readline history indexes are 1-based; the current command itself
    # is excluded from the listing.
    for i in range(1, readline.get_current_history_length()):
        print '%s %s' % (str(i).rjust(4), readline.get_history_item(i))

def help_toggle_confirmations(self):
    print 'toggle_confirmations: Toggle confirmation messages on/off'
    print 'usage: toggle_confirmations'

def do_toggle_confirmations(self, args):
    # options.yes == True means "assume yes", i.e. confirmations are OFF.
    if self.options.yes:
        self.options.yes = False
        print 'Confirmation messages are enabled'
    else:
        self.options.yes = True
        logging.warning('Confirmation messages are DISABLED!')
def help_login(self):
    print 'login: Connect to a Spacewalk server'
    print 'usage: login [USERNAME] [SERVER]'

def do_login(self, args):
    # Establish an authenticated XML-RPC session.  Credentials are taken,
    # in order of preference, from: a cached session file, --password on
    # the command line, the per-server config, then interactive prompts.
    (args, _options) = parse_arguments(args)

    # logout before logging in again
    if len(self.session):
        logging.warning('You are already logged in')
        return True

    # an argument passed to the function get precedence
    if len(args) == 2:
        server = args[1]
    else:
        # use the server we were already using
        server = self.config['server']

    # bail out if not server was given
    if not server:
        logging.warning('No server specified')
        return False

    # load the server-specific configuration
    self.load_config_section(server)

    # an argument passed to the function get precedence
    if len(args):
        username = args[0]
    elif self.config.has_key('username'):
        # use the username from before
        username = self.config['username']
    elif self.options.username:
        # use the username from before
        username = self.options.username
    else:
        username = ''

    # set the protocol
    if self.config.has_key('nossl') and self.config['nossl']:
        proto = 'http'
    else:
        proto = 'https'

    server_url = '%s://%s/rpc/api' % (proto, server)

    # this will enable spewing out all client/server traffic
    verbose_xmlrpc = False
    if self.options.debug > 1:
        verbose_xmlrpc = True

    # connect to the server
    logging.debug('Connecting to %s', server_url)
    self.client = xmlrpclib.Server(server_url, verbose=verbose_xmlrpc)

    # check the API to verify connectivity
    try:
        self.api_version = self.client.api.getVersion()
        logging.debug('Server API Version = %s', self.api_version)
    except xmlrpclib.Fault, e:
        if self.options.debug > 0:
            logging.exception(e)
        logging.error('Failed to connect to %s', server_url)
        self.client = None
        return False

    # ensure the server is recent enough
    # NOTE(review): api_version comes back from XML-RPC as a string while
    # MINIMUM_API_VERSION is a float; this comparison relies on Python 2
    # mixed-type ordering -- confirm it actually rejects old servers.
    if self.api_version < self.MINIMUM_API_VERSION:
        logging.error('API (%s) is too old (>= %s required)',
                      self.api_version, self.MINIMUM_API_VERSION)
        self.client = None
        return False

    # store the session file in the server's own directory
    session_file = os.path.join(self.conf_dir, server, 'session')

    # retrieve a cached session
    if os.path.isfile(session_file) and not self.options.password:
        try:
            sessionfile = open(session_file, 'r')
            # read the session (format = username:session)
            for line in sessionfile:
                parts = line.split(':')
                # NOTE(review): parts[1] still carries the trailing
                # newline read from the file -- confirm the server
                # tolerates it.
                # if a username was passed, make sure it matches
                if len(username):
                    if parts[0] == username:
                        self.session = parts[1]
                else:
                    # get the username from the cache if one
                    # wasn't passed by the user
                    username = parts[0]
                    self.session = parts[1]
            sessionfile.close()
        except IOError:
            logging.error('Could not read %s', session_file)

    # check the cached credentials by doing an API call
    if self.session:
        try:
            logging.debug('Using cached credentials from %s', session_file)
            self.client.user.listAssignableRoles(self.session)
        except xmlrpclib.Fault:
            logging.warning('Cached credentials are invalid')
            self.current_user = ''
            self.session = ''

    # attempt to login if we don't have a valid session yet
    if not len(self.session):
        if len(username):
            logging.info('Spacewalk Username: %s', username)
        else:
            username = prompt_user('Spacewalk Username:', noblank=True)

        if self.options.password:
            password = self.options.password
            # remove this from the options so that if 'login' is called
            # again, the user is prompted for the information
            self.options.password = None
        elif self.config.has_key('password'):
            password = self.config['password']
        else:
            password = getpass('Spacewalk Password: ')

        # login to the server
        try:
            self.session = self.client.auth.login(username, password)
            # don't keep the password around
            password = None
        except xmlrpclib.Fault:
            logging.error('Invalid credentials')
            return False

        try:
            # make sure ~/.spacecmd/<server> exists
            conf_dir = os.path.join(self.conf_dir, server)
            if not os.path.isdir(conf_dir):
                os.mkdir(conf_dir, 0700)

            # add the new cache to the file
            line = '%s:%s\n' % (username, self.session)

            # write the new cache file out
            sessionfile = open(session_file, 'w')
            sessionfile.write(line)
            sessionfile.close()
        except IOError:
            logging.error('Could not write session file')

    # load the system/package/errata caches
    self.load_caches(server)

    # keep track of who we are and who we're connected to
    self.current_user = username
    self.server = server

    logging.info('Connected to %s as %s', server_url, username)

    return True
def help_logout(self):
    print 'logout: Disconnect from the server'
    print 'usage: logout'

def do_logout(self, args):
    # Invalidate the server-side session, then reset all local state.
    if self.session:
        self.client.auth.logout(self.session)

    self.session = ''
    self.current_user = ''
    self.server = ''
    self.do_clear_caches('')

def help_whoami(self):
    print 'whoami: Print the name of the currently logged in user'
    print 'usage: whoami'

def do_whoami(self, args):
    if len(self.current_user):
        print self.current_user
    else:
        logging.warning("You are not logged in")

def help_whoamitalkingto(self):
    print 'whoamitalkingto: Print the name of the server'
    print 'usage: whoamitalkingto'

def do_whoamitalkingto(self, args):
    if len(self.server):
        print self.server
    else:
        logging.warning('Yourself')
def tab_complete_errata(self, text):
    # Complete advisory names plus the special 'search:' prefix.
    options = self.do_errata_list('', True)
    options.append('search:')

    return tab_completer(options, text)

def tab_complete_systems(self, text):
    # Complete <SYSTEMS> arguments: the group:/channel:/search: selector
    # prefixes, or plain system names.
    if re.match('group:', text):
        # prepend 'group' to each item for tab completion
        groups = ['group:%s' % g for g in self.do_group_list('', True)]
        return tab_completer(groups, text)
    elif re.match('channel:', text):
        # prepend 'channel' to each item for tab completion
        channels = ['channel:%s' % s
                    for s in self.do_softwarechannel_list('', True)]
        return tab_completer(channels, text)
    elif re.match('search:', text):
        # prepend 'search' to each item for tab completion
        fields = ['search:%s:' % f for f in self.SYSTEM_SEARCH_FIELDS]
        return tab_completer(fields, text)
    else:
        options = self.get_system_names()
        # add our special search options
        options.extend(['group:', 'channel:', 'search:'])
        return tab_completer(options, text)
def remove_last_history_item(self):
    # Drop the most recent readline entry (e.g. to keep secrets out of
    # the history).
    last = readline.get_current_history_length() - 1
    if last >= 0:
        readline.remove_history_item(last)

def clear_errata_cache(self):
    # Reset the in-memory errata cache and persist the empty state.
    self.all_errata = {}
    self.errata_cache_expire = datetime.now()
    self.save_errata_cache()
def get_errata_names(self):
    """
    Return all cached advisory names, sorted.

    self.all_errata maps advisory_name -> details dict (see
    generate_errata_cache), so the names are the dict's keys.  The old
    code iterated the dict and called .get() on each *key* (a string),
    which raised AttributeError as soon as the cache was non-empty.
    """
    return sorted(self.all_errata.keys())
def get_erratum_id(self, name):
    """Return the numeric ID for an advisory name, or None if unknown."""
    details = self.all_errata.get(name)
    if details is not None:
        return details['id']

def get_erratum_name(self, erratum_id):
    """Return the advisory name for a numeric ID, or None if unknown."""
    for advisory, details in self.all_errata.items():
        if details['id'] == erratum_id:
            return advisory
def generate_errata_cache(self, force=False):
    """
    Populate self.all_errata from every software channel visible to the
    current session.

    The cache maps advisory_name -> details dict (id, advisory_name,
    advisory_type, date, advisory_synopsis).  Channels the user cannot
    access are skipped.  Does nothing while the cache is still fresh,
    unless force is True.
    """
    if not force and datetime.now() < self.errata_cache_expire:
        return

    if not self.options.quiet:
        # tell the user what's going on
        self.replace_line_buffer('** Generating errata cache **')

    channels = self.client.channel.listSoftwareChannels(self.session)
    channels = [c.get('label') for c in channels]

    for c in channels:
        try:
            errata = \
                self.client.channel.software.listErrata(self.session, c)
        except xmlrpclib.Fault:
            logging.debug('No access to %s', c)
            continue

        for erratum in errata:
            if erratum.get('advisory_name') not in self.all_errata:
                self.all_errata[erratum.get('advisory_name')] = \
                    {'id': erratum.get('id'),
                     'advisory_name': erratum.get('advisory_name'),
                     'advisory_type': erratum.get('advisory_type'),
                     'date': erratum.get('date'),
                     'advisory_synopsis': erratum.get('advisory_synopsis')}

    # BUGFIX: ERRATA_CACHE_TTL is expressed in seconds, but was passed as
    # timedelta's first positional argument, which is *days* (86400
    # seconds became 86400 days).  Now matches generate_package_cache.
    self.errata_cache_expire = \
        datetime.now() + timedelta(seconds=self.ERRATA_CACHE_TTL)
    self.save_errata_cache()

    if not self.options.quiet:
        # restore the original line buffer
        self.replace_line_buffer()
def save_errata_cache(self):
    # Persist the errata cache with its expiry timestamp.
    save_cache(self.errata_cache_file,
               self.all_errata,
               self.errata_cache_expire)

def clear_package_cache(self):
    # Reset all three package caches and persist the empty state.
    self.all_packages_short = {}
    self.all_packages = {}
    self.all_packages_by_id = {}
    self.package_cache_expire = datetime.now()
    self.save_package_caches()
    def generate_package_cache(self, force=False):
        """Build the package lookup tables from all visible channels.

        Populates three indexes: all_packages_short (short name -> ''),
        all_packages (long name -> [ids]) and all_packages_by_id
        (id -> long name).  Rebuilt only after expiry unless *force*.
        """
        if not force and datetime.now() < self.package_cache_expire:
            return
        if not self.options.quiet:
            # tell the user what's going on
            self.replace_line_buffer('** Generating package cache **')
        channels = self.client.channel.listSoftwareChannels(self.session)
        channels = [c.get('label') for c in channels]
        for c in channels:
            try:
                packages = \
                    self.client.channel.software.listAllPackages(self.session, c)
            except xmlrpclib.Fault:
                # not every channel is accessible to this user
                logging.debug('No access to %s', c)
                continue
            for p in packages:
                if not p.get('name') in self.all_packages_short:
                    self.all_packages_short[p.get('name')] = ''
                longname = build_package_names(p)
                # the same long name can occur in several channels, so it
                # maps to a list of package ids
                if not longname in self.all_packages:
                    self.all_packages[longname] = [p.get('id')]
                else:
                    self.all_packages[longname].append(p.get('id'))
        # keep a reverse dictionary so we can lookup package names by ID
        # NOTE: iteritems() is Python 2 only
        self.all_packages_by_id = {}
        for (k, v) in self.all_packages.iteritems():
            for i in v:
                self.all_packages_by_id[i] = k
        self.package_cache_expire = \
            datetime.now() + timedelta(seconds=self.PACKAGE_CACHE_TTL)
        self.save_package_caches()
        if not self.options.quiet:
            # restore the original line buffer
            self.replace_line_buffer()
    def save_package_caches(self):
        # store the cache to disk to speed things up
        # (three files: short names, long names, id -> name reverse map)
        save_cache(self.packages_short_cache_file,
                   self.all_packages_short,
                   self.package_cache_expire)
        save_cache(self.packages_long_cache_file,
                   self.all_packages,
                   self.package_cache_expire)
        save_cache(self.packages_by_id_cache_file,
                   self.all_packages_by_id,
                   self.package_cache_expire)
def get_package_names(self, longnames=False):
self.generate_package_cache()
if longnames:
return self.all_packages.keys()
else:
return self.all_packages_short
def get_package_id(self, name):
self.generate_package_cache()
try:
return set(self.all_packages[name])
except KeyError:
return
def get_package_name(self, package_id):
self.generate_package_cache()
try:
return self.all_packages_by_id[package_id]
except KeyError:
return
    def clear_system_cache(self):
        # Reset the id -> name system cache, expire it immediately, persist.
        self.all_systems = {}
        self.system_cache_expire = datetime.now()
        self.save_system_cache()
    def generate_system_cache(self, force=False, delay=0):
        """Refresh the id -> name map of all registered systems.

        Rebuilt only after expiry unless *force*.  *delay* sleeps that many
        seconds first (used after deletions so the server can catch up).
        """
        if not force and datetime.now() < self.system_cache_expire:
            return
        if not self.options.quiet:
            # tell the user what's going on
            self.replace_line_buffer('** Generating system cache **')
        # we might need to wait for some systems to delete
        if delay:
            sleep(delay)
        systems = self.client.system.listSystems(self.session)
        self.all_systems = {}
        for s in systems:
            self.all_systems[s.get('id')] = s.get('name')
        self.system_cache_expire = \
            datetime.now() + timedelta(seconds=self.SYSTEM_CACHE_TTL)
        self.save_system_cache()
        if not self.options.quiet:
            # restore the original line buffer
            self.replace_line_buffer()
    def save_system_cache(self):
        # Persist the system cache plus its expiry stamp to disk.
        save_cache(self.system_cache_file,
                   self.all_systems,
                   self.system_cache_expire)
def load_caches(self, server):
conf_dir = os.path.join(self.conf_dir, server)
try:
if not os.path.isdir(conf_dir):
os.mkdir(conf_dir, 0700)
except OSError:
logging.error('Could not create directory %s', conf_dir)
return
self.ssm_cache_file = os.path.join(conf_dir, 'ssm')
self.system_cache_file = os.path.join(conf_dir, 'systems')
self.errata_cache_file = os.path.join(conf_dir, 'errata')
self.packages_long_cache_file = os.path.join(conf_dir, 'packages_long')
self.packages_by_id_cache_file = \
os.path.join(conf_dir, 'packages_by_id')
self.packages_short_cache_file = \
os.path.join(conf_dir, 'packages_short')
# load self.ssm from disk
(self.ssm, _ignore) = load_cache(self.ssm_cache_file)
# update the prompt now that we loaded the SSM
self.postcmd(False, '')
# load self.all_systems from disk
(self.all_systems, self.system_cache_expire) = \
load_cache(self.system_cache_file)
# load self.all_errata from disk
(self.all_errata, self.errata_cache_expire) = \
load_cache(self.errata_cache_file)
# load self.all_packages_short from disk
(self.all_packages_short, self.package_cache_expire) = \
load_cache(self.packages_short_cache_file)
# load self.all_packages from disk
(self.all_packages, self.package_cache_expire) = \
load_cache(self.packages_long_cache_file)
# load self.all_packages_by_id from disk
(self.all_packages_by_id, self.package_cache_expire) = \
load_cache(self.packages_by_id_cache_file)
    def get_system_names(self):
        # Ensure the system cache is fresh, then return the profile names
        # (the dict's values; callers iterate or filter them).
        self.generate_system_cache()
        return self.all_systems.values()
def get_system_id(self, name):
self.generate_system_cache()
try:
# check if we were passed a system instead of a name
system_id = int(name)
if system_id in self.all_systems:
return system_id
except ValueError:
pass
# get a set of matching systems to check for duplicate names
systems = []
for system_id in self.all_systems:
if name == self.all_systems[system_id]:
systems.append(system_id)
if len(systems) == 1:
return systems[0]
elif not len(systems):
logging.warning("Can't find system ID for %s", name)
return 0
else:
logging.warning('Duplicate system profile names found!')
logging.warning("Please reference systems by ID or resolve the")
logging.warning("underlying issue with 'system_delete' or 'system_rename'")
id_list = '%s = ' % name
for system_id in systems:
id_list = id_list + '%i, ' % system_id
logging.warning('')
logging.warning(id_list[:-2])
return 0
def get_system_name(self, system_id):
self.generate_system_cache()
try:
return self.all_systems[system_id]
except KeyError:
return
def get_org_id(self, name):
details = self.client.org.getDetails(self.session, name)
return details.get('id')
def expand_errata(self, args):
if not isinstance(args, list):
args = args.split()
self.generate_errata_cache()
if len(args) == 0:
return self.all_errata
errata = []
for item in args:
if re.match('search:', item):
item = re.sub('search:', '', item)
errata.extend(self.do_errata_search(item, True))
else:
errata.append(item)
matches = filter_results(self.all_errata, errata)
return matches
    def expand_systems(self, args):
        """Expand a system argument list into profile names and ids.

        Supports the special tokens 'ssm', 'group:NAME', 'search:QUERY'
        and 'channel:LABEL'; anything else is a system name or numeric id.
        Returns a de-duplicated list of matching names plus explicit ids.
        """
        if not isinstance(args, list):
            args = shlex.split(args)
        systems = []
        system_ids = []
        for item in args:
            if re.match('ssm', item, re.I):
                # the current SSM selection
                systems.extend(self.ssm)
            elif re.match('group:', item):
                item = re.sub('group:', '', item)
                members = self.do_group_listsystems("'%s'" % item, True)
                if len(members):
                    # names are regex-escaped because filter_results
                    # treats them as patterns
                    systems.extend([re.escape(m) for m in members])
                else:
                    logging.warning('No systems in group %s', item)
            elif re.match('search:', item):
                query = item.split(':', 1)[1]
                results = self.do_system_search(query, True)
                if len(results):
                    systems.extend([re.escape(r) for r in results])
            elif re.match('channel:', item):
                item = re.sub('channel:', '', item)
                members = self.do_softwarechannel_listsystems(item, True)
                if len(members):
                    systems.extend([re.escape(m) for m in members])
                else:
                    logging.warning('No systems subscribed to %s', item)
            else:
                # translate system IDs that the user passes
                try:
                    sys_id = int(item)
                    system_ids.append(sys_id)
                except ValueError:
                    # just a system name
                    systems.append(item)
        matches = filter_results(self.get_system_names(), systems)
        return list(set(matches + system_ids))
def list_base_channels(self):
all_channels = self.client.channel.listSoftwareChannels(self.session)
base_channels = []
for c in all_channels:
if not c.get('parent_label'):
base_channels.append(c.get('label'))
return base_channels
    def list_child_channels(self, system=None, parent=None, subscribed=False):
        """List child-channel labels, selected one of three ways:

        - system: channels subscribable by that system (or currently
          subscribed, when *subscribed* is set)
        - parent: children of the given parent channel label
        - neither: every channel that has a parent

        Returns None (implicitly) when *system* cannot be resolved.
        """
        channels = []
        if system:
            system_id = self.get_system_id(system)
            if not system_id:
                # unknown or ambiguous system name
                return
            if subscribed:
                channels = \
                    self.client.system.listSubscribedChildChannels(self.session,
                                                                   system_id)
            else:
                channels = self.client.system.listSubscribableChildChannels(
                    self.session, system_id)
        elif parent:
            all_channels = \
                self.client.channel.listSoftwareChannels(self.session)
            for c in all_channels:
                if parent == c.get('parent_label'):
                    channels.append(c)
        else:
            # get all channels that have a parent
            all_channels = \
                self.client.channel.listSoftwareChannels(self.session)
            for c in all_channels:
                if c.get('parent_label'):
                    channels.append(c)
        return [c.get('label') for c in channels]
def user_confirm(self, prompt='Is this ok [y/N]:', nospacer=False,
integer=False, ignore_yes=False):
if self.options.yes and not ignore_yes:
return True
if nospacer:
answer = prompt_user('%s' % prompt)
else:
answer = prompt_user('\n%s' % prompt)
if re.match('y', answer, re.I):
if integer:
return 1
else:
return True
else:
if integer:
return 0
else:
return False
def check_api_version(self, want):
want_parts = [int(i) for i in want.split('.')]
have_parts = [int(i) for i in self.api_version.split('.')]
if len(have_parts) == 2 and len(want_parts) == 2:
if have_parts[0] == want_parts[0]:
# compare minor versions if majors are the same
return have_parts[1] >= want_parts[1]
else:
# only compare major versions if they differ
return have_parts[0] >= want_parts[0]
else:
# compare the whole value
return float(self.api_version) >= float(want)
    def replace_line_buffer(self, msg=None):
        """Overwrite the current terminal line with *msg*, or restore it.

        Used to flash transient status messages (e.g. '** Generating ... **')
        without destroying what the user has typed at the prompt.
        """
        # restore the old buffer if we weren't given a new line
        if not msg:
            msg = readline.get_line_buffer()
        # don't print a prompt if there wasn't one to begin with
        if len(readline.get_line_buffer()):
            new_line = '%s%s' % (self.prompt, msg)
        else:
            new_line = '%s' % msg
        # clear the current line
        self.stdout.write('\r'.ljust(len(self.current_line) + 1))
        self.stdout.flush()
        # write the new line
        self.stdout.write('\r%s' % new_line)
        self.stdout.flush()
        # keep track of what is displayed so we can clear it later
        self.current_line = new_line
def load_config_section(self, section):
config_opts = ['server', 'username', 'password', 'nossl']
if not self.config_parser.has_section(section):
logging.debug('Configuration section [%s] does not exist', section)
return
logging.debug('Loading configuration section [%s]', section)
for key in config_opts:
# don't override command-line options
if self.options.__dict__[key]:
# set the config value to the command-line argument
self.config[key] = self.options.__dict__[key]
else:
try:
self.config[key] = self.config_parser.get(section, key)
except NoOptionError:
pass
# handle the nossl boolean
if self.config.has_key('nossl') and isinstance(self.config['nossl'], str):
if re.match('^1|y|true$', self.config['nossl'], re.I):
self.config['nossl'] = True
else:
self.config['nossl'] = False
# Obfuscate the password with asterisks
config_debug = self.config.copy()
if config_debug.has_key('password'):
config_debug['password'] = "*" * len(config_debug['password'])
logging.debug('Current Configuration: %s', config_debug)
|
from openerp.osv import fields, osv
from openerp.tools.translate import _
class sale_order_line(osv.Model):
    """
    OpenERP Model : sale_order_line
    """
    _inherit = 'sale.order.line'
    # Adds a per-line flag: when set, the first attachment of the line's
    # product that is marked as a brochure is printed with the sale order
    # (consumed by sale_order.print_with_attachment below).
    _columns = {
        'att_bro': fields.boolean('Attach Brochure', required=False, help="""If you check this
        option, the first attachment related to the product_id marked as brochure will be printed
        as extra info with sale order"""),
    }
class sale_order(osv.Model):
    """
    OpenERP Model : sale.order

    Extends sale.order with brochure-attachment printing and a
    company-configurable sale report.
    """
    _inherit = 'sale.order'

    def print_with_attachment(self, cr, user, ids, context=None):
        """Walk the order lines and print those flagged with att_bro.

        ROBUSTNESS FIX: *context* now defaults to None instead of a shared
        mutable dict.
        """
        for o in self.browse(cr, user, ids, context):
            for ol in o.order_line:
                if ol.att_bro:
                    # print() form is valid on both Python 2 and 3
                    print("Im Here i will go to print %s " % ol.name)
        return True

    def __get_company_object(self, cr, uid):
        """Return the company of *uid*; raise if none is configured."""
        user = self.pool.get('res.users').browse(cr, uid, uid)
        print(user)
        if not user.company_id:
            # BUG FIX: except_osv was referenced unqualified (NameError at
            # runtime); it lives on the imported osv module.
            raise osv.except_osv(_('ERROR !'), _(
                'There is no company configured for this user'))
        return user.company_id

    def _get_report_name(self, cr, uid, context):
        """Report name from the user's company, falling back to the first
        ir.actions.report.xml registered for the sale.order model."""
        report = self.__get_company_object(cr, uid).sale_report_id
        if not report:
            rep_id = self.pool.get("ir.actions.report.xml").search(
                cr, uid, [('model', '=', 'sale.order'), ], order="id")[0]
            report = self.pool.get(
                "ir.actions.report.xml").browse(cr, uid, rep_id)
        return report.report_name

    def print_quotation(self, cr, uid, ids, context=None):
        """Override: print the quotation using the configured report."""
        pq = super(sale_order, self).print_quotation(cr, uid, ids, context)
        return {'type': 'ir.actions.report.xml',
                'report_name': self._get_report_name(cr, uid, context),
                'datas': pq['datas'],
                'nodestroy': True}
|
import re
from xbmcswift2 import Plugin, xbmc, xbmcgui
from resources.lib import scraper
# Symbolic string ids -> numeric ids in the add-on's localization files;
# looked up through the _() helper defined near the end of this module.
STRINGS = {
    'page': 30000,
    'search': 30001,
    'show_my_favs': 30002,
    'no_scraper_found': 30003,
    'add_to_my_favs': 30004,
    'del_from_my_favs': 30005,
    'no_my_favs': 30006,
    'use_context_menu': 30007,
    'to_add': 30008,
}
# Global plugin object; all routes and settings below hang off this instance.
plugin = Plugin()
@plugin.route('/')
def show_categories():
    """Top-level listing: scraper categories plus search and favorites."""
    items = []
    for category in scraper.get_categories():
        items.append({
            'label': category['title'],
            'path': plugin.url_for(
                endpoint='show_path',
                path=category['path']
            )
        })
    items.append({
        'label': _('search'),
        'path': plugin.url_for('video_search')
    })
    items.append({
        'label': _('show_my_favs'),
        'path': plugin.url_for('show_my_favs')
    })
    return plugin.finish(items)
@plugin.route('/search/')
def video_search():
    """Prompt for a query and redirect to the search-result route."""
    query = __keyboard(_('search'))
    if not query:
        # keyboard cancelled or empty input: do nothing
        return
    __log('search gots a string: "%s"' % query)
    plugin.redirect(plugin.url_for(
        endpoint='video_search_result',
        search_string=query
    ))
@plugin.route('/search/<search_string>/')
def video_search_result(search_string):
    """Render the scraper path corresponding to a search query."""
    return show_path(scraper.get_search_path(search_string))
@plugin.route('/my_favs/')
def show_my_favs():
    """List the stored favorites, each with a 'remove' context entry."""

    def build_context_menu(stored_path):
        return [(
            _('del_from_my_favs'),
            'XBMC.RunPlugin(%s)' % plugin.url_for('del_from_my_favs',
                                                  item_path=stored_path),
        )]

    my_fav_items = plugin.get_storage('my_fav_items')
    items = my_fav_items.values()
    for item in items:
        item['context_menu'] = build_context_menu(item['path'])
    if not items:
        # nothing stored yet: explain how favorites are added
        dialog = xbmcgui.Dialog()
        dialog.ok(_('no_my_favs'), _('use_context_menu'), _('to_add'))
        return
    return plugin.finish(items)
@plugin.route('/path/<path>/')
def show_path(path):
    """Render a scraper path; notify the user when no scraper matches."""
    try:
        entries, next_page, prev_page = scraper.get_path(path)
    except NotImplementedError:
        plugin.notify(msg=_('no_scraper_found'), title='Path: %s' % path)
        return
    return __add_items(entries, next_page, prev_page)
def __add_items(entries, next_page=None, prev_page=None):
    """Convert scraper entries into xbmcswift2 list items and finish.

    Folder entries link back to show_path; video entries are playable and
    link to watch_video.  Optional prev/next page items are added around
    them.  Every item is mirrored into the 'temp_items' storage so the
    add-to-favorites context action can copy it later.
    """
    my_fav_items = plugin.get_storage('my_fav_items')

    def context_menu(item_path, video_id):
        # Single-entry context menu: add or remove favorite, depending on
        # whether this path is already stored.
        if not item_path in my_fav_items:
            context_menu = [(
                _('add_to_my_favs'),
                'XBMC.RunPlugin(%s)' % plugin.url_for(
                    endpoint='add_to_my_favs',
                    item_path=item_path
                ),
            )]
        else:
            context_menu = [(
                _('del_from_my_favs'),
                'XBMC.RunPlugin(%s)' % plugin.url_for(
                    endpoint='del_from_my_favs',
                    item_path=item_path
                ),
            )]
        return context_menu

    def format_episode_title(title):
        # Optionally rewrite "Show - Staffel X Folge Y" style titles into
        # "Show SXEY".  fix_show_title is assigned below, before any call.
        if fix_show_title and '-' in title and ('Folge' in title or 'Staffel' in title):
            title, show = title.rsplit('-', 1)
            title = title.replace('Staffel ', 'S').replace(' Folge ', 'E')
            title = title.replace('Folge ', 'E').replace('Ganze Folge', '')
            return u'%s %s' % (show.strip(), title.strip())
        return title

    def better_thumbnail(thumb_url):
        # Rewrite thumbnail URLs to a higher-quality variant on the image
        # server; no-op for URLs that don't match the expected shape.
        if 'web/' in thumb_url and not thumb_url.startswith('http://is'):
            thumb_url = thumb_url.replace('http://i', 'http://is')
            thumb_url = re.sub('mv/web/[0-9]+', 'de', thumb_url)
            thumb_url = thumb_url.replace('.jpg', '.jpg_hq.jpg')
        return thumb_url

    fix_show_title = plugin.get_setting('fix_show_title', bool)
    temp_items = plugin.get_storage('temp_items')
    temp_items.clear()
    items = []
    has_icons = False
    # i is pre-initialised so the next-page 'count' below works even when
    # entries is empty
    i = 0
    for i, entry in enumerate(entries):
        if not has_icons and entry.get('thumb'):
            has_icons = True
        if entry['is_folder']:
            items.append({
                'label': entry['title'],
                'thumbnail': entry.get('thumb', 'DefaultFolder.png'),
                'info': {'count': i + 1},
                'path': plugin.url_for(
                    endpoint='show_path',
                    path=entry['path']
                )
            })
        else:
            # playable video entry; NOTE: unicode() is Python 2 only
            items.append({
                'label': format_episode_title(entry['title']),
                'thumbnail': better_thumbnail(
                    entry.get('thumb', 'DefaultVideo.png')
                ),
                'icon': entry.get('thumb', 'DefaultVideo.png'),
                'info': {
                    'video_id': entry['video_id'],
                    'count': i + 1,
                    'plot': entry.get('description', ''),
                    'studio': entry.get('author', {}).get('name', ''),
                    'date': entry.get('date', ''),
                    'year': int(entry.get('year', 0)),
                    'rating': float(entry.get('rating', 0)),
                    'votes': unicode(entry.get('votes')),
                    'views': unicode(entry.get('views', 0))
                },
                'stream_info': {
                    'video': {'duration': entry.get('duration', 0)}
                },
                'is_playable': True,
                'path': plugin.url_for(
                    endpoint='watch_video',
                    video_id=entry['video_id']
                )
            })
    if prev_page:
        # pagination entry sorted before everything else (count 0)
        items.append({
            'label': '<< %s %s <<' % (_('page'), prev_page['number']),
            'info': {'count': 0},
            'thumbnail': 'DefaultFolder.png',
            'path': plugin.url_for(
                endpoint='show_path',
                path=prev_page['path'],
                update='true',
            )
        })
    if next_page:
        # pagination entry sorted after the last real item (count i + 2)
        items.append({
            'label': '>> %s %s >>' % (_('page'), next_page['number']),
            'thumbnail': 'DefaultFolder.png',
            'info': {'count': i + 2},
            'path': plugin.url_for(
                endpoint='show_path',
                path=next_page['path'],
                update='true',
            )
        })
    # mirror every item into temp storage and attach its context menu
    for item in items:
        temp_items[item['path']] = item
        item['context_menu'] = context_menu(
            item['path'], item['info'].get('video_id')
        )
    temp_items.sync()
    update_on_pageswitch = plugin.get_setting('update_on_pageswitch', bool)
    is_update = update_on_pageswitch and 'update' in plugin.request.args
    finish_kwargs = {
        'sort_methods': ('playlist_order', 'label'),
        'update_listing': is_update
    }
    if has_icons and plugin.get_setting('force_viewmode', bool):
        finish_kwargs['view_mode'] = 'thumbnail'
    return plugin.finish(items, **finish_kwargs)
@plugin.route('/video/<video_id>/play')
def watch_video(video_id):
    """Resolve a video id to a playable URL (HLS, RTMP or plain FLV)."""
    video = scraper.get_video(video_id)
    if 'hls_playlist' in video:
        __log('watch_video using HLS')
        video_url = video['hls_playlist']
    elif video['rtmpurl']:
        __log('watch_video using RTMPE or RTMPT')
        video_url = (
            '%(rtmpurl)s '
            'tcUrl=%(rtmpurl)s '
            'swfVfy=%(swfobj)s '
            'pageUrl=%(pageurl)s '
            'playpath=%(playpath)s'
        ) % video
    else:
        __log('watch_video using FLV')
        video_url = video['filepath'] + video['file']
    __log('watch_video finished with url: %s' % video_url)
    return plugin.set_resolved_url(video_url)
@plugin.route('/my_favs/add/<item_path>')
def add_to_my_favs(item_path):
    """Copy one entry from the temporary item store into the favorites."""
    favorites = plugin.get_storage('my_fav_items')
    favorites[item_path] = plugin.get_storage('temp_items')[item_path]
    favorites.sync()
@plugin.route('/my_favs/del/<item_path>')
def del_from_my_favs(item_path):
    """Remove an entry from the favorites store, if present."""
    favorites = plugin.get_storage('my_fav_items')
    if item_path in favorites:
        del favorites[item_path]
    favorites.sync()
def __keyboard(title, text=''):
    """Open the XBMC on-screen keyboard; return the entered text or None."""
    kb = xbmc.Keyboard(text, title)
    kb.doModal()
    if not (kb.isConfirmed() and kb.getText()):
        # cancelled, or nothing typed
        return None
    return kb.getText()
def _(string_id):
    """Translate a symbolic string id via the STRINGS table.

    Unknown ids fall back to the raw id (and are logged) so the UI never
    shows an empty label.
    """
    if string_id not in STRINGS:
        plugin.log.warning('String is missing: %s' % string_id)
        return string_id
    return plugin.get_string(STRINGS[string_id])
def __log(text):
    # Thin wrapper so all module logging goes through the plugin logger.
    plugin.log.info(text)
if __name__ == '__main__':
    try:
        plugin.run()
    except scraper.NetworkError:
        # NOTE(review): 'network_error' has no entry in STRINGS above, so
        # _() logs a warning and shows the raw id -- confirm intended.
        plugin.notify(msg=_('network_error'))
|
import os
from PyQt4 import QtGui
from ltmt.ui.users.add_user.Ui_addUser import Ui_AddUser
class AddUser(QtGui.QDialog):
    """Add-user dialog for the users page of LTMT."""

    def __init__(self, configparser, parent=None):
        """Init method

        @param configparser Configuration parser backing the users list
        @param parent Parent QtGui.QWidget object
        """
        self.configparser = configparser
        self.parent = parent
        QtGui.QDialog.__init__(self)
        self.ui = Ui_AddUser()
        self.ui.setupUi(self)
        self.parseDefaults()
        self.ui.detailsWid.hide()

    def parseDefaults(self):
        """Parse default values for new accounts from /etc/default/useradd.

        ROBUSTNESS FIX: group/home/shell are pre-initialised and the file
        read is guarded, so userChanged() can no longer fail with
        AttributeError when the file is missing or lacks GROUP/HOME/SHELL.
        """
        self.group = ''
        self.home = '/home'
        self.shell = '/bin/sh'
        try:
            with open("/etc/default/useradd", 'r') as ua:
                for line in ua:
                    pair = line.strip().split('=')
                    if len(pair) >= 2:
                        if pair[0] == "GROUP":
                            self.group = pair[1]
                        elif pair[0] == "HOME":
                            self.home = pair[1]
                        elif pair[0] == "SHELL":
                            self.shell = pair[1]
        except IOError:
            # keep the fallback defaults when the file cannot be read
            pass

    def userChanged(self, username):
        """Slot called when the user name changes; refresh derived fields.

        @param username String username
        """
        self.ui.initGLine.setText(self.group)
        self.ui.homeLine.setText(os.path.join(self.home, username))
        self.ui.shellLine.setText(self.shell)

    def accept(self):
        """Reimplemented QtGui.QDialog.accept.

        Stores the new user in the configparser (after asking before
        overwriting an existing one), then accepts the dialog.
        """
        user = self.ui.nameLine.text()
        print("__accepted__", user)
        if user in self.configparser.getUsersList():
            # confirm before overwriting an existing user entry
            if QtGui.QMessageBox.warning(self, self.tr("Replace User"),
                    self.tr("Are you sure you want to overwrite \"{0}\" user?")\
                    .format(user), QtGui.QMessageBox.Yes | QtGui.QMessageBox.No,
                    QtGui.QMessageBox.No) == QtGui.QMessageBox.Yes:
                self.configparser.delUser(user)
            else:
                return
        self.configparser.addUser(user)
        if self.ui.syncCheck.isChecked():
            self.configparser.setUserSync(user, passwd=self.ui.pwLine.text(),
                uid=self.ui.uidSpin.text(), init_group=self.ui.initGLine.text(),
                groups=[g.strip() for g in self.ui.groupsLine.text().split(',')],
                home=self.ui.homeLine.text(), shell=self.ui.shellLine.text())
        QtGui.QDialog.accept(self)
|
# Register this module as an EventGhost plugin; eg is injected into the
# module namespace by the EventGhost plugin loader.
eg.RegisterPlugin(
    name = "TheaterTek",
    author = "SurFan",
    version = "0.0.1",
    kind = "program",
    guid = "{EF830DA5-EF08-4050-BAE0-D5FC0057D149}",
    canMultiLoad = True,
    createMacrosOnAdd = True,
    description = (
        'Adds actions to control <a href="http://www.theatertek.com/">TheaterTek</a>.'
        '\n\n<p><b>Notice:</b><br>'
        'To make it work, you have to enable TCP control in TheaterTek. '
    ),
    url = "http://www.eventghost.net/forum/viewtopic.php?t=559",
    # 16x16 plugin icon, base64-encoded PNG
    icon = (
        "iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAIAAACQkWg2AAACGElEQVR42m1RPWhaURQ+"
        "gg6lCjooxnZIH9iARAi9QqwQTcBmioNQcWskwxV5i+AgDqHERZdsEfreUE3BDFJI4XUq"
        "EeqeizjUnw6CKO1SUJ4lWKR5PXotjaZ3eefv+873nafT6/XT6dRkMp1+PgUA0Svy9Ozs"
        "zfY2cbvdmLpcri/VOsyfDgiAxGP8SpgyxmSQFyUGQAjE2a0yWQJQoBL5i+MNWbeIcCAO"
        "qwCNaLMkPhsilFyTa9zjiXtmXQeAcg9AGEoB5mBwAChHk7TBYBAIBNLpNJYvUhfq1x/L"
        "Hti/7Yebh6VSCekbbxvomM+dn5df7b9cNY3ckWGkUqkgq9fgxUI2m81kMni0VqtVr9cP"
        "PPt3NjCghEpUUhTl5ONJ0BLsdrsIlmVZFEWn09lsNtHYHEABvoO0JlFKY7FYuVxGbi4m"
        "l8uFw+F8Pu/z+bCL4DkgBHRtxo0TuH0ymdjt9uMPxxs/N7BSLBbREpeMyxcA7bUGyw9t"
        "7Jp2G41Gv9/vdDpcVTKZ5JIIxcMCE64ESzCIw8OrYdfSxTLqsVqttVotkUiYzeZQKKQz"
        "Go3j8dhgMBwVjrZ+b/V6PVSMqd/vr1arGHAzKAan2+227vbb5K6Sd5/er68/xlMiIJVK"
        "CYJgs9kikQiy4ImeOTZXARyzs/McR1VVLRQKaGBv7wWy+J/O/sx/APjGD39dXio3NyrG"
        "o9EoGo0+efCIt/4ArUT50E11E2MAAAAASUVORK5CYII="
    ),
)
"""\
IP COMMANDS
-----------
TT->AP Sent from TT to client application
AP->TT Sent from client application to TT
TT<-->AP Sent from TT and can be polled by client.
Commands are sent ASCII in the form:
4 byte command, space, {parameter} CRLF
A successful command returns:
Command, space, 0
OR
Command, space, response
An unsuccessful command returns:
Command, space, -1
Example:
0000 // Client app
0000 TheaterTek DVD // Returned value
Enum values
-----------
IP_MEDIASTATE 0=Stopped/NoMedia, 1=Playing, 2=paused, 3=FF, 4=RW
IP_FULLSCREEN 0=Minimized, 1=Windowed, 2=Fullscreen
IP_GETPRIVATE Allows client to set/get a private string up to 1024 bytes on TT. This data persists as long as TT is running.
Auto Killer Commands
--------------------
WM_COMMANDS
-----------
"""
import asynchat
import socket
import asyncore
import threading
import new
# (name, 4-digit command code, description) triples for TheaterTek state
# queries; the plugin builds one action class per entry (see the loop at
# the bottom of TheaterTek.__init__).
ttRequests = (
    ('IP_APPLICATION', '0000', 'Request Application name'),
    ('IP_VERSION', '0001', 'Request Application version'),
    ('IP_FULLSCREEN', '0510', 'Request Fullscreen/windowed status'),
    ('IP_MEDIASTATE', '1000', 'Request MediaState'),
    ('IP_MEDIATIME', '1010', 'Request Media time'),
    ('IP_ENDOFMEDIA', '1030', 'End of media'),
    ('IP_FORMAT', '1040', 'Request Video Format'),
    ('IP_GETAR', '1300', 'Request Current Aspect Ratio'),
    ('IP_ARCOUNT', '1310', 'Request Aspect Ratio Count'),
    ('IP_ARNAMES', '1320', 'ARequest Aspect Ratio Names'),
    ('IP_CURFILE', '1400', 'Request Current file'),
    ('IP_DISKINSERTION', '1410', 'Disk inserted'),
    ('IP_DISKEJECTION', '1420', 'Disk ejected'),
    ('IP_DVDUNIQUEID', '1500', 'DVD unique ID'),
    ('IP_DVDTITLE', '1510', 'Request Current Title'),
    ('IP_DVDTITLECOUNT', '1520', 'Request Title count'),
    ('IP_DVDCHAPTER', '1600', 'Request Current Chapter'),
    ('IP_DVDCHAPTERCOUNT', '1610', 'Request Chapter count'),
    ('IP_DVDAUDIO', '1700', 'Request Current audio stream'),
    ('IP_DVDAUDIOCOUNT', '1720', 'Request Audio stream count'),
    ('IP_DVDAUDIONAMES', '1730', 'Request Audio stream names'),
    ('IP_DVDSUBTITLE', '1800', 'Request Current subtitle stream'),
    ('IP_DVDSUBTITLECOUNT', '1820', 'Request Subtitle stream count'),
    ('IP_DVDSUBTITLENAMES', '1830', 'Request Subtitle names (name|name)'),
    ('IP_DVDANGLE', '1900', 'Request Current angle'),
    ('IP_DVDANGLECOUNT', '1920', 'Request Angle count'),
    ('IP_DVDMENUMODE', '2000', 'Request Menu mode'),
    ('IP_DOMAIN', '2010', 'Request DVD Domain'),
    ('IP_GETVOLUME', '2100', 'Request Current volume'),
    ('IP_GETAUDIOOUTPUT', '2120', 'Request Current audio output'),
    ('IP_GETLISTCOUNT', '3050', 'Request Current list count'),
    ('IP_GETLIST', '3060', 'Request playlist'),
    ('IP_GETPRIVATE', '4010', 'Request Private app string'),
    ('IP_COUNTCHANGERS', '8110', 'CountChangers'),
)
# (name, command code, description, parameter description) tuples for
# TheaterTek commands.  Codes are strings because the action classes build
# the wire command by string concatenation (see wmAction and
# stdActionWithStringParameter above).
# BUG FIX: ID_STREAM previously carried the int 32943 while every other
# entry uses a string code; concatenating an int would raise TypeError.
ttCommands = (
    ('IP_FLASH', '0500', 'OSD Flash message', 'Message'),
    ('IP_MEDIAPOS', '1020', 'Set media time', 'Time(hh:mm:ss)'),
    ('IP_SETAR', '1330', 'Set Current AR', 'AR number'),
    ('IP_DVDPLAYTITLE', '1530', 'Play Title', 'Title Number'),
    ('IP_DVDPLAYCHAPTER', '1620', 'Play chapter', 'Chapter number'),
    ('IP_DVDPLAYTITCHAP', '1630', 'Play Chapter in Title', 'Title/Chapter (space delimited)'),
    ('IP_DVDSETAUDIO', '1710', 'Set audio stream', 'Stream number'),
    ('IP_DVDSETSUBTITLE', '1810', 'Set subtitle stream', 'Stream number (-1 to disable)'),
    ('IP_DVDSETANGLE', '1910', 'Set angle', 'Angle'),
    ('IP_SETVOLUME', '2110', 'Set Current volume', 'Volume'),
    ('IP_SETAUDIOOUTPUT', '2130', 'Set audio output', 'Audio Output'),
    ('IP_ADDBOOKMARK', '2200', 'Add a bookmark', ''),
    ('IP_NEXTBOOKMARK', '2210', 'Next bookmark', ''),
    ('IP_PREVBOOKMARK', '2220', 'Previous bookmark', ''),
    ('IP_PLAYFILE', '3000', 'Play file', 'Filename'),
    ('IP_ADDFILE', '3010', 'Add file to playlist', 'Filename'),
    ('IP_CLEARLIST', '3020', 'Clear playlist', ''),
    ('IP_PLAYATINDEX', '3040', 'Play item at index', 'Index'),
    ('IP_GETINDEX', '3030', 'Current item index', 'Index'),
    ('IP_DELATINDEX', '3070', 'Delete file at index', 'Index'),
    ('IP_SETPRIVATE', '4000', 'Private app string', 'String'),
    ('IP_KEYPRESS', '5010', 'Key code', 'Key-Code'),
    ('ID_PLAY', '32771', 'Play', ''),
    ('ID_STOP', '32772', 'Stop', ''),
    ('ID_PAUSE', '32773', 'Pause', ''),
    ('ID_NEXT', '32774', 'Next', ''),
    ('ID_PREVIOUS', '32775', 'Previous', ''),
    ('ID_EXIT', '32776', 'Exit', ''),
    ('ID_FF', '32777', 'FastForward', ''),
    ('ID_RW', '32778', 'Fast Rewind', ''),
    ('ID_MENU_LIST', '32779', 'Menu List', ''),
    ('ID_TITLE_MENU', '32780', 'Title Menu', ''),
    ('ID_FF_1X', '32782', 'Normal Play', ''),
    ('ID_FF_2X', '32784', 'Fast Forward 2x', ''),
    ('ID_FF_5X', '32785', 'Fast Forward 5x', ''),
    ('ID_FF_10X', '32786', 'Fast Forward 10x', ''),
    ('ID_FF_20X', '32787', 'Fast Forward 20x', ''),
    ('ID_FF_SLOW', '32788', 'Fast Forward Slow', ''),
    ('ID_RW_1X', '32790', 'Reverse Play', ''),
    ('ID_RW_2X', '32791', 'Fast Reverse 2X', ''),
    ('ID_RW_5X', '32792', 'Fast Reverse 5X', ''),
    ('ID_RW_10X', '32793', 'Fast Reverse 10X', ''),
    ('ID_RW_20X', '32794', 'Fast Reverse 20X', ''),
    ('ID_ROOT_MENU', '32796', 'Root Menu', ''),
    ('ID_AUDIO_MENU', '32797', 'Audio Menu', ''),
    ('ID_SUBTITLE_MENU', '32798', 'Subtitle Menu', ''),
    ('ID_CHAPTER_MENU', '32799', 'Chapter Menu', ''),
    ('ID_CC_ON', '32804', 'Closed Captions On', ''),
    ('ID_CC_OFF', '32805', 'Closed Captions Off', ''),
    ('ID_ABOUT', '32807', 'About', ''),
    ('ID_SUB_OFF', '32808', 'Subtitles Off', ''),
    ('ID_ASPECT_DEFINE', '32810', 'Define Aspect Ratio', ''),
    ('ID_ASPECT_ANAM', '32811', 'AR anamorph', ''),
    ('ID_ASPECT_NONANAM', '32812', 'AR non anamorph', ''),
    ('ID_ASPECT_LETTERBOX', '32813', 'AR Letterbox', ''),
    ('ID_BOOK_ADD', '32814', 'Add Bookmark', ''),
    ('ID_BUTTON32819', '32819', 'BUTTON32819', ''),
    ('ID_BUTTON32820', '32820', 'BUTTON32820', ''),
    ('ID_ONSCREEN', '32821', 'On Screen', ''),
    ('ID_VID_BRIGHTNESS', '32824', 'Brightness', ''),
    ('ID_VID_CONTRAST', '32825', 'Contrast', ''),
    ('ID_VID_HUE', '32826', 'Hue', ''),
    ('ID_VID_SATURATION', '32827', 'Saturation', ''),
    ('ID_OVERSCAN', '32828', 'Overscan', ''),
    ('ID_VID_GAMMA', '32829', 'Gamma', ''),
    ('ID_MENU_CHAPTER', '32830', 'Menu Chapter', ''),
    ('ID_MENU_AUDIO', '32831', 'Menu Audio', ''),
    ('ID_MENU_ANGLE', '32832', 'Menu Angle', ''),
    ('ID_MENU_FF', '32833', 'Menu FF', ''),
    ('ID_MENU_SUBTITLES', '32834', 'Menu Subtitles', ''),
    ('ID_CLOSED_CAPTIONS', '32835', 'Closed Captions', ''),
    ('ID_BOOK_DELETE', '32836', 'Delete Bookmark', ''),
    ('ID_ANGLE_MENU', '32837', 'Angle Menu', ''),
    ('ID_RESUME', '32838', 'Resume', ''),
    ('ID_MENU_TITLE', '32839', 'Menu Title', ''),
    ('ID_SETUP', '32841', 'Setup', ''),
    ('ID_ADJUSTVIDEO', '32842', 'Adjust Video', ''),
    ('ID_ASPECT_LOCK', '32843', 'Lock Aspect ratio', ''),
    ('ID_SETSTARTPOINT', '32846', 'Set Startpoint', ''),
    ('ID_K_RETURN', '32849', 'Key Return', ''),
    ('ID_K_UP', '32850', 'Key Up', ''),
    ('ID_K_DOWN', '32851', 'Key Down', ''),
    ('ID_K_LEFT', '32852', 'Key Left', ''),
    ('ID_K_RIGHT', '32853', 'Key Right', ''),
    ('ID_K_FF', '32854', 'Key FastForward', ''),
    ('ID_K_RW', '32855', 'Key Rewind', ''),
    ('ID_K_ESCAPE', '32856', 'Key Escape', ''),
    ('ID_NEXTAR', '32857', 'Next Aspect ratio', ''),
    ('ID_INFO', '32858', 'Info', ''),
    ('ID_ARFIRST', '32859', 'First Aspect Ratio', ''),
    ('ID_AR2', '32860', 'Aspect ratio 2', ''),
    ('ID_AR3', '32861', 'Aspect ratio 3', ''),
    ('ID_AR4', '32862', 'Aspect ratio 4', ''),
    ('ID_AR5', '32863', 'Aspect ratio 5', ''),
    ('ID_AR6', '32864', 'Aspect ratio 6', ''),
    ('ID_AR7', '32865', 'Aspect ratio 7', ''),
    ('ID_AR8', '32866', 'Aspect ratio 8', ''),
    ('ID_AR9', '32867', 'Aspect ratio 9', ''),
    ('ID_ARLAST', '32868', 'Last Aspect ratio', ''),
    ('ID_EJECT', '32870', 'Eject', ''),
    ('ID_CONTEXT', '32872', 'Context', ''),
    ('ID_ALTEXIT', '32873', 'ALT Exit', ''),
    ('ID_MINIMIZE', '32874', 'Minimize', ''),
    ('ID_NEXTSUB', '32875', 'Next Subtitle', ''),
    ('ID_NEXTAUDIO', '32876', 'Next Audio', ''),
    ('ID_REPLAY', '32877', 'Replay', ''),
    ('ID_JUMP', '32878', 'Jump', ''),
    ('ID_FRAMESTEP', '32879', 'Framestep', ''),
    ('ID_ABREPEAT', '32880', 'A/B-Repeat', ''),
    ('ID_CHAPTITREP', '32881', 'Chapter Title Repeat', ''),
    ('ID_NEXT_ANGLE', '32883', 'Next Angle', ''),
    ('ID_OPEN', '32884', 'Open', ''),
    ('ID_NEXT_TIT', '32885', 'Next Title', ''),
    ('ID_STATS', '32886', 'Statistics', ''),
    ('ID_CAPTURE', '32887', 'Capture', ''),
    ('ID_BK_RESUME', '32888', 'BK Resume', ''),
    ('ID_DEINTERLACE', '32889', 'Deinterlace', ''),
    ('ID_VOLUP', '32891', 'Volume Up', ''),
    ('ID_VOLDOWN', '32892', 'Volume Down', ''),
    ('ID_NEXTDISK', '32893', 'Next Disk', ''),
    ('ID_SHOWTIME', '32894', 'Show Time', ''),
    ('ID_CC_NUDGE_UP', '32895', 'CC Nudge Up', ''),
    ('ID_CC_NUDGE_DOWN', '32896', 'CC Nudge Down', ''),
    ('ID_UPGRADE', '32897', 'Upgrade', ''),
    ('ID_NEXT_FILE', '32898', 'Next File', ''),
    ('ID_PREVIOUS_FILE', '32899', 'Previous File', ''),
    ('ID_TSPROG', '32901', 'TSPROG', ''),
    ('ID_PREV_TIT', '32902', 'Previous Title', ''),
    ('ID_SLOW', '32904', 'Slow', ''),
    ('ID_CCTOGGLE', '32905', 'Closed Captions Toggle', ''),
    ('ID_AR11', '32906', 'Aspect ratio 11', ''),
    ('ID_AR12', '32907', 'Aspect ratio 12', ''),
    ('ID_AR13', '32908', 'Aspect ratio 13', ''),
    ('ID_AR14', '32909', 'Aspect ratio 14', ''),
    ('ID_AR15', '32910', 'Aspect ratio 15', ''),
    ('ID_AR16', '32911', 'Aspect ratio 16', ''),
    ('ID_AR17', '32912', 'Aspect ratio 17', ''),
    ('ID_AR18', '32913', 'Aspect ratio 18', ''),
    ('ID_AR19', '32914', 'Aspect ratio 19', ''),
    ('ID_AR20', '32915', 'Aspect ratio 20', ''),
    ('ID_VMRSTATS', '32916', 'VMR Statistics', ''),
    ('ID_LIPDOWN', '32917', 'Lipsync down', ''),
    ('ID_LIPUP', '32918', 'Lipsync Up', ''),
    ('ID_MUTE', '32919', 'Mute', ''),
    ('ID_BLANKING', '32920', 'Blanking', ''),
    ('ID_TOGGLE', '32922', 'Toggle', ''),
    ('ID_MOVELEFT', '32924', 'Move Left', ''),
    ('ID_MOVERIGHT', '32925', 'Move Right', ''),
    ('ID_MOVEUP', '32926', 'Move Up', ''),
    ('ID_MOVEDOWN', '32927', 'Move Down', ''),
    ('ID_H_EXPAND', '32928', 'Horizontal Expand', ''),
    ('ID_H_CONTRACT', '32929', 'Horizontal Contract', ''),
    ('ID_V_EXPAND', '32930', 'Vertical Expand', ''),
    ('ID_V_CONTRACT', '32931', 'Vertical Contract', ''),
    ('ID_ZOOM_IN', '32932', 'Zoom In', ''),
    ('ID_ZOOM_OUT', '32933', 'Zoom Out', ''),
    ('ID_BL_LEFT', '32934', 'BL_LEFT', ''),
    ('ID_BL_RIGHT', '32935', 'BL_RIGHT', ''),
    ('ID_BT_UP', '32936', 'BT_UP', ''),
    ('ID_BT_DOWN', '32937', 'BT_DOWN', ''),
    ('ID_BR_LEFT', '32938', 'BR_LEFT', ''),
    ('ID_BR_RIGHT', '32939', 'BR_RIGHT', ''),
    ('ID_BB_UP', '32940', 'BB_UP', ''),
    ('ID_BB_DOWN', '32941', 'BB_DOWN', ''),
    ('ID_STREAM', '32943', 'STREAM', ''),
)
# AutoKiller / disc-changer control commands.
# NOTE(review): entries mix 3-tuples and 4-tuples (some carry a parameter
# description); any consumer unpacking a fixed arity must handle both.
ttAutoKillerAndChangerCommands = (
    ('IP_LAUNCH', '8000', 'Launch AutoKiller'),
    ('IP_QUIT', '8010', 'Quit Autokiller'),
    ('IP_MOUNTDISK', '8020', 'Mount Disk', 'Changer/Slot (comma delimited)'),
    ('IP_UNMOUNTDISK', '8030', 'Unmount Disk', 'Changer/Slot (comma delimited)'),
    ('IP_EJECTDISK', '8040', 'Eject Disk', 'Changer/Slot (comma delimited)'),
    ('IP_GETSLOTDATA', '8050', 'GETSLOTDATA', 'Changer, Slot'),
    ('IP_GETDRIVEDATA', '8060', 'GETDRIVEDATA', 'Changer ->DriveData'),
    ('IP_CHECKCHANGED', '8070', 'CHECKCHANGED'),
    ('IP_REBUILDDATA', '8080', 'REBUILDDATA'),
    ('IP_DATACHANGED', '8100', 'Notification of data change'),
)
class TheaterTekSession(asynchat.async_chat):
    """
    Handles a Theatertek TCP/IP session.

    Asynchronous, line-oriented TCP client: buffers incoming data until
    a CRLF terminator and hands each complete line to the owning
    plugin's ValueUpdate() method.
    """
    def __init__ (self, plugin, address):
        # Keep a reference to the plugin so socket events can be
        # reported back as EventGhost events.
        self.plugin = plugin
        # Call constructor of the parent class
        asynchat.async_chat.__init__(self)
        # Set up input line terminator
        self.set_terminator('\r\n')
        # Initialize input data buffer
        self.buffer = ''
        # create and connect a socket
        self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
        eg.RestartAsyncore()
        self.settimeout(1.0)
        try:
            self.connect(address)
        except:
            # Best-effort connect: failures are reported asynchronously
            # via handle_expt()/handle_close() instead of raising here.
            pass
    def handle_connect(self):
        """
        Called when the active opener's socket actually makes a connection.
        """
        self.plugin.TriggerEvent("Connected")
    def handle_expt(self):
        # connection failed — mark the session dead so the plugin's
        # DoCommand() will reconnect on the next command
        self.plugin.isSessionRunning = False
        self.plugin.TriggerEvent("NoConnection")
        self.close()
    def handle_close(self):
        """
        Called when the channel is closed.
        """
        self.plugin.isSessionRunning = False
        self.plugin.TriggerEvent("ConnectionLost")
        self.close()
    def collect_incoming_data(self, data):
        """
        Called with data holding an arbitrary amount of received data.
        """
        self.buffer = self.buffer + data
    def found_terminator(self):
        """
        Called when the incoming data stream matches the termination
        condition set by set_terminator.

        Forwards the buffered line to the plugin and resets the buffer.
        """
        # call the plugins handler method
        self.plugin.ValueUpdate(self.buffer)
        # reset the buffer
        self.buffer = ''
class stdAction(eg.ActionClass):
    """Action that sends its preset command string to the player."""

    def __call__(self):
        # The command code was stored on the class at creation time.
        command = self.value
        self.plugin.DoCommand(command)
class stdActionWithStringParameter(eg.ActionWithStringParameter):
    """Action whose command takes one user-supplied string argument."""

    def __call__(self, Param):
        # Command code and parameter are separated by a single space.
        command = self.value
        self.plugin.DoCommand(command + " " + Param)
class wmAction(eg.ActionClass):
    """Action for window-management commands, all sharing opcode 5000."""

    def __call__(self):
        # Every window-management command is the "5000" opcode followed
        # by this action's own sub-command code.
        prefix = "5000 "
        self.plugin.DoCommand(prefix + self.value)
class TheaterTek(eg.PluginClass):
    """EventGhost plugin for the TheaterTek DVD player.

    Dynamically builds one EventGhost action class per entry of the
    ttRequests and ttCommands tables, talks to the player over TCP
    (default port 2663) through TheaterTekSession, and translates
    incoming status lines into EventGhost events via the ttEvents table.
    """
    def __init__(self):
        # Connection defaults; overwritten by __start__().
        self.host = "localhost"
        self.port = 2663
        self.isSessionRunning = False
        self.timeline = ""
        # waitStr/waitFlag implement a short "echo wait" in DoCommand():
        # the player's echo of the last sent command is swallowed in
        # ValueUpdate() instead of being raised as an event.
        self.waitStr = None
        self.waitFlag = threading.Event()
        self.PlayState = -1
        # Most recent payload received, keyed by 4-digit message header.
        self.lastMessage = {}
        self.lastSubtitleNum = 0
        self.lastSubtitlesEnabled = False
        self.lastAudioTrackNum = 0
        # Create one action class per request-table entry.
        group = self.AddGroup('Requests')
        for className, scancode, descr in ttRequests:
            clsAttributes = dict(name=descr, value=scancode)
            cls = new.classobj(className, (stdAction,), clsAttributes)
            group.AddAction(cls)
        group = self.AddGroup('Commands')
        for className, scancode, descr, ParamDescr in ttCommands:
            clsAttributes = dict(name=descr, value=scancode)
            if ParamDescr == "":
                # "IP_" commands are sent verbatim; all other
                # parameterless commands are window-management commands
                # routed through the "5000" opcode (wmAction).
                if className[0:3] == "IP_":
                    cls = new.classobj(className, (stdAction,), clsAttributes)
                else:
                    cls = new.classobj(className, (wmAction,), clsAttributes)
            else:
                # Commands with a parameter description prompt the user
                # for an extra string argument.
                cls = new.classobj(className, (stdActionWithStringParameter,), clsAttributes)
                cls.parameterDescription = ParamDescr
            group.AddAction(cls)
    def __start__(
        self,
        host="localhost",
        port=2663,
        dummy1=None,
        dummy2=None,
        useNewEvents=False
    ):
        """Called by EventGhost when the plugin starts; stores settings.

        dummy1/dummy2/useNewEvents are kept for backwards compatibility
        with older saved configurations.
        """
        self.host = host
        self.port = port
        # NOTE(review): self.events is not read anywhere in the visible
        # code — confirm whether it is used by the framework.
        self.events = self.ttEvents
    # Maps each 4-digit status header either to an event name (string)
    # or to a (prefix, payload -> suffix mapping) tuple; see ValueUpdate().
    ttEvents = {
        "0000": "ApplicationName",
        "0001": "Version",
        "0500": "OSD",
        "0510": (
            "WindowState",
            {
                "0": "Minimized",
                "1": "Windowed",
                "2": "Fullscreen"
            },
        ),
        "1000": (
            "MediaState",
            {
                "0": "Stopped",
                "1": "Playing",
                "2": "Paused",
                "3": "FF",
                "4": "RW"
            },
        ),
        "1010": "MediaTime",
        "1030": "EndOfMedia",
        "1040": (
            "Format",
            {
                "0": "NTSC",
                "1": "PAL",
            },
        ),
        "1300": "AspectRatio",
        "1310": "AspectRatioCount",
        "1320": "AspectRatioNames",
        "1400": "Currentfile",
        "1410": "DiskInserted",
        "1420": "DiskEjected",
        "1500": "DVDUniqueID",
        "1510": "CurrentTitle",
        "1520": "TitleCount",
        "1600": "CurrentChapter",
        "1610": "ChapterCount",
        "1700": "CurrentAudioStream",
        "1720": "AudioStreamCount",
        "1730": "AudioStreamNames",
        "1800": "CurrentSubtitleStream",
        "1820": "SubtitleStreamCount",
        "1830": "SubtitleNames",
        "1900": "CurrentAngle",
        "1920": "AngleCount",
        "2000": (
            "MenuMode",
            {
                "0": "Off",
                "1": "On",
            },
        ),
        "2010": "DVDDomain",
        "2100": "CurrentVolume",
        "2120": "CurrentAudioOutput",
        "3050": "CurrentListCount",
        "3060": "Playlist",
        "4010": "PrivateAppString",
        "8110": "CountChangers",
    }
    def ValueUpdate(self, text):
        """Handle one complete line received from the player.

        First swallows the echo of the last command sent by DoCommand(),
        then splits the line into a 4-digit header and a payload and
        raises the corresponding EventGhost event via ttEvents.
        """
        if text == self.waitStr:
            # Echo of the command we just sent; release DoCommand().
            self.waitStr = None
            self.waitFlag.set()
            return
        header = text[0:4]
        # Byte 4 is a separator; the remainder is the UTF-8 payload.
        state = text[5:].decode('utf-8')
        self.lastMessage[header] = state
        ttEvent = self.ttEvents.get(header, None)
        if ttEvent is not None:
            if type(ttEvent) == type({}):
                # NOTE(review): ttEvents currently contains only strings
                # and tuples, so this dict branch looks vestigial — confirm.
                eventString = ttEvent.get(state, None)
                if eventString is not None:
                    self.TriggerEvent(eventString)
                else:
                    self.TriggerEvent(header, [state])
            elif type(ttEvent) == type(()):
                # (prefix, mapping) pair: translate payload to a suffix,
                # falling back to the raw payload when unmapped.
                suffix2 = ttEvent[1].get(state, None)
                if suffix2 is not None:
                    self.TriggerEvent(ttEvent[0] + "." + suffix2)
                else:
                    self.TriggerEvent(ttEvent[0] + "." + str(state))
            else:
                if state == "":
                    self.TriggerEvent(ttEvent)
                else:
                    self.TriggerEvent(ttEvent, [state])
            return
        else:
            # Unknown header: raise a generic event carrying the payload.
            self.TriggerEvent(header, [state])
    @eg.LogIt
    def DoCommand(self, cmdstr):
        """Send one command line to the player, (re)connecting if needed.

        Waits up to one second for the player to echo the command back
        (see ValueUpdate) so the echo is not re-raised as an event.
        """
        self.waitFlag.clear()
        self.waitStr = cmdstr
        if not self.isSessionRunning:
            # Lazily (re)open the TCP session on first use or after a
            # connection loss.
            self.session = TheaterTekSession(self, (self.host, self.port))
            self.isSessionRunning = True
        try:
            self.session.sendall(cmdstr + "\r\n")
        except:
            # Send failed: tear the session down so the next call
            # reconnects, and notify listeners.
            self.isSessionRunning = False
            self.TriggerEvent('close')
            self.session.close()
        self.waitFlag.wait(1.0)
        self.waitStr = None
        self.waitFlag.set()
    def SetOSD(self, text):
        """Show *text* on the player's on-screen display (opcode 1200)."""
        self.DoCommand("1200 " + text)
    def Configure(
        self,
        host="localhost",
        port=2663,
        dummy1=None,
        dummy2=None
    ):
        """Show the host/port configuration panel.

        The two trailing None results keep the saved settings tuple
        shape-compatible with older plugin versions.
        """
        panel = eg.ConfigPanel(self)
        hostEdit = panel.TextCtrl(host)
        portEdit = panel.SpinIntCtrl(port, max=65535)
        panel.AddLine("TCP/IP host:", hostEdit)
        panel.AddLine("TCP/IP port:", portEdit)
        while panel.Affirmed():
            panel.SetResult(
                hostEdit.GetValue(),
                portEdit.GetValue(),
                None,
                None
            )
    class MyCommand(eg.ActionWithStringParameter):
        # Sends an arbitrary raw command string to the player.
        name = "Raw Command"
        def __call__(self, cmd):
            self.plugin.DoCommand(cmd)
|
import itertools
import sys
from flask import abort, g, render_template, request, redirect, Blueprint, flash, url_for, current_app
from flask.ext.login import login_required, current_user
from realms.lib.util import to_canonical, remove_ext, gravatar_url
from .models import PageNotFound
# Flask blueprint grouping all wiki page routes.
blueprint = Blueprint('wiki', __name__)
@blueprint.route("/_commit/<sha>/<path:name>")
def commit(name, sha):
    """Render a wiki page as it existed at commit *sha* (404 if absent)."""
    if current_app.config.get('PRIVATE_WIKI') and current_user.is_anonymous():
        return current_app.login_manager.unauthorized()
    canonical = to_canonical(name)
    page_data = g.current_wiki.get_page(canonical, sha=sha)
    if page_data:
        return render_template('wiki/page.html', name=name, page=page_data, commit=sha)
    abort(404)
@blueprint.route(r"/_compare/<path:name>/<regex('\w+'):fsha><regex('\.{2,3}'):dots><regex('\w+'):lsha>")
def compare(name, fsha, dots, lsha):
    """Render a diff between two revisions (fsha..lsha) of a page."""
    if current_app.config.get('PRIVATE_WIKI') and current_user.is_anonymous():
        return current_app.login_manager.unauthorized()
    changes = g.current_wiki.compare(name, fsha, lsha)
    return render_template(
        'wiki/compare.html', name=name, diff=changes, old=fsha, new=lsha)
@blueprint.route("/_revert", methods=['POST'])
@login_required
def revert():
    """Revert a page to an earlier commit.

    Expects form fields: name, commit, and an optional message.
    """
    cname = to_canonical(request.form.get('name'))
    commit = request.form.get('commit')
    message = request.form.get('message', "Reverting %s" % cname)
    # Reject anonymous edits unless explicitly allowed, and locked pages.
    if not current_app.config.get('ALLOW_ANON') and current_user.is_anonymous():
        return dict(error=True, message="Anonymous posting not allowed"), 403
    if cname in current_app.config.get('WIKI_LOCKED_PAGES'):
        return dict(error=True, message="Page is locked"), 403
    try:
        sha = g.current_wiki.revert_page(
            cname,
            commit,
            message=message,
            username=current_user.username,
            email=current_user.email)
    except PageNotFound as e:
        return dict(error=True, message=e.message), 404
    if sha:
        flash("Page reverted")
    return dict(sha=sha)
@blueprint.route("/_history/<path:name>")
def history(name):
    """Render the commit history of a page, decorating each entry with
    the author's gravatar URL."""
    if current_app.config.get('PRIVATE_WIKI') and current_user.is_anonymous():
        return current_app.login_manager.unauthorized()
    entries = g.current_wiki.get_history(name)
    for entry in entries:
        entry['gravatar'] = gravatar_url(entry['author_email'])
    return render_template('wiki/history.html', name=name, history=entries)
@blueprint.route("/_edit/<path:name>")
@login_required
def edit(name):
    """Open the editor for an existing page; redirect to create if missing."""
    cname = to_canonical(name)
    # NOTE(review): the lookup uses the raw name rather than cname —
    # presumably intentional, but verify against to_canonical's semantics.
    page = g.current_wiki.get_page(name)
    if not page:
        # Page doesn't exist yet; send the user to the create form.
        return redirect(url_for('wiki.create', name=cname))
    name = remove_ext(page['path'])
    g.assets['js'].append('editor.js')
    context = dict(
        name=name,
        content=page.get('data'),
        info=page.get('info'),
        sha=page.get('sha'),
        partials=page.get('partials'),
    )
    return render_template('wiki/edit.html', **context)
@blueprint.route("/_create/", defaults={'name': None})
@blueprint.route("/_create/<path:name>")
@login_required
def create(name):
    """Show the editor for a brand-new page (empty content)."""
    cname = to_canonical(name) if name else ""
    if cname and g.current_wiki.get_page(cname):
        # The page already exists; edit it instead of duplicating it.
        return redirect(url_for('wiki.edit', name=cname))
    g.assets['js'].append('editor.js')
    return render_template('wiki/edit.html', name=cname, content="", info={})
def _get_subdir(path, depth):
parts = path.split('/', depth)
if len(parts) > depth:
return parts[-2]
def _tree_index(items, path=""):
depth = len(path.split("/"))
items = filter(lambda x: x['name'].startswith(path), items)
items = sorted(items, key=lambda x: x['name'])
for subdir, items in itertools.groupby(items, key=lambda x: _get_subdir(x['name'], depth)):
if not subdir:
for item in items:
yield dict(item, dir=False)
else:
size = 0
ctime = sys.maxint
mtime = 0
for item in items:
size += item['size']
ctime = min(item['ctime'], ctime)
mtime = max(item['mtime'], mtime)
yield dict(name=path + subdir + "/",
mtime=mtime,
ctime=ctime,
size=size,
dir=True)
@blueprint.route("/_index", defaults={"path": ""})
@blueprint.route("/_index/<path:path>")
def index(path):
    """Render a directory-style index of the wiki rooted at *path*."""
    if current_app.config.get('PRIVATE_WIKI') and current_user.is_anonymous():
        return current_app.login_manager.unauthorized()
    entries = g.current_wiki.get_index()
    if path:
        path = to_canonical(path) + "/"
    tree = _tree_index(entries, path=path)
    return render_template('wiki/index.html', index=tree, path=path)
@blueprint.route("/<path:name>", methods=['POST', 'PUT', 'DELETE'])
@login_required
def page_write(name):
    """Create (POST), update/rename (PUT), or delete (DELETE) a wiki page.

    Returns a dict with the resulting commit sha, or an error dict with
    an HTTP status for invalid names, anonymous posts, or locked pages.
    """
    cname = to_canonical(name)
    if not cname:
        return dict(error=True, message="Invalid name")
    if not current_app.config.get('ALLOW_ANON') and current_user.is_anonymous():
        return dict(error=True, message="Anonymous posting not allowed"), 403
    if request.method == 'POST':
        # Create
        if cname in current_app.config.get('WIKI_LOCKED_PAGES'):
            return dict(error=True, message="Page is locked"), 403
        sha = g.current_wiki.write_page(cname,
                                        request.form['content'],
                                        message=request.form['message'],
                                        create=True,
                                        username=current_user.username,
                                        email=current_user.email)
        # BUG FIX: this branch previously fell through without returning,
        # so a successful page creation responded with None (a 500 in
        # Flask). Return the commit sha like the PUT/DELETE branches do.
        return dict(sha=sha)
    elif request.method == 'PUT':
        edit_cname = to_canonical(request.form['name'])
        if edit_cname in current_app.config.get('WIKI_LOCKED_PAGES'):
            return dict(error=True, message="Page is locked"), 403
        if edit_cname != cname:
            # The form renamed the page; move it before writing.
            g.current_wiki.rename_page(cname, edit_cname)
        sha = g.current_wiki.write_page(edit_cname,
                                        request.form['content'],
                                        message=request.form['message'],
                                        username=current_user.username,
                                        email=current_user.email)
        return dict(sha=sha)
    elif request.method == 'DELETE':
        # DELETE
        if cname in current_app.config.get('WIKI_LOCKED_PAGES'):
            return dict(error=True, message="Page is locked"), 403
        sha = g.current_wiki.delete_page(cname,
                                         username=current_user.username,
                                         email=current_user.email)
        return dict(sha=sha)
@blueprint.route("/", defaults={'name': 'home'})
@blueprint.route("/<path:name>")
def page(name):
    """Display a wiki page, canonicalizing the URL first."""
    if current_app.config.get('PRIVATE_WIKI') and current_user.is_anonymous():
        return current_app.login_manager.unauthorized()
    cname = to_canonical(name)
    if cname != name:
        # Redirect so every page has exactly one canonical address.
        return redirect(url_for('wiki.page', name=cname))
    data = g.current_wiki.get_page(cname)
    if not data:
        # Unknown page: offer to create it.
        return redirect(url_for('wiki.create', name=cname))
    return render_template(
        'wiki/page.html', name=cname, page=data, partials=data.get('partials'))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.