code
stringlengths 1
199k
|
|---|
from os import listdir, sep
from os.path import abspath, basename, isdir
from sys import argv
def tree(dir, padding, print_files=False):
    """Recursively print an ASCII tree rooted at *dir*.

    dir         -- directory path to render
    padding     -- accumulated prefix string for the current depth
    print_files -- when True, list regular files as well as directories
    """
    print(padding[:-1] + '+-' + basename(abspath(dir)) + '/')
    padding = padding + ' '
    if print_files:
        entries = listdir(dir)
    else:
        # directories only
        entries = [name for name in listdir(dir) if isdir(dir + sep + name)]
    total = len(entries)
    for position, name in enumerate(entries, start=1):
        print(padding + '|')
        child = dir + sep + name
        if isdir(child):
            # Last child gets a blank continuation column, others keep the pipe.
            branch = ' ' if position == total else '|'
            tree(child, padding + branch, print_files)
        else:
            print(padding + '+-' + name)
def usage():
    """Return the command-line help text, with the script name filled in."""
    script = basename(argv[0])
    return ('Usage: %s [-f] <PATH>\n'
            'Print tree structure of path specified.\n'
            'Options:\n'
            '-f Print files as well as directories\n'
            'PATH Path to process') % script
def main():
    """Entry point: parse argv and print the requested directory tree."""
    argc = len(argv)
    if argc == 2:
        # print just directories
        target = argv[1]
        if isdir(target):
            tree(target, ' ')
        else:
            print('ERROR: \'' + target + '\' is not a directory')
    elif argc == 3 and argv[1] == '-f':
        # print directories and files
        target = argv[2]
        if isdir(target):
            tree(target, ' ', True)
        else:
            print('ERROR: \'' + target + '\' is not a directory')
    else:
        # no arguments, or unrecognised arguments: show help
        print(usage())

if __name__ == '__main__':
    main()
|
(S'0944451bd301bb4eaa393ea2b50a396c'
p1
(ihappydoclib.parseinfo.moduleinfo
ModuleInfo
p2
(dp3
S'_namespaces'
p4
((dp5
S'SmartsPattern'
p6
(ihappydoclib.parseinfo.classinfo
ClassInfo
p7
(dp8
g4
((dp9
(dp10
S'__del__'
p11
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
p12
(dp13
g4
((dp14
(dp15
tp16
sS'_exception_info'
p17
(dp18
sS'_parameter_names'
p19
(S'self'
p20
tp21
sS'_parameter_info'
p22
(dp23
g20
(NNNtp24
ssS'_filename'
p25
S'../python/frowns/Smarts/Pattern.py'
p26
sS'_docstring'
p27
S''
sS'_name'
p28
g11
sS'_parent'
p29
g7
sS'_comment_info'
p30
(dp31
sS'_configuration_values'
p32
(dp33
sS'_class_info'
p34
g14
sS'_function_info'
p35
g15
sS'_comments'
p36
S''
sbsS'dump'
p37
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
p38
(dp39
g4
((dp40
(dp41
tp42
sg17
(dp43
sg19
(S'self'
p44
tp45
sg22
(dp46
g44
(NNNtp47
ssg25
g26
sg27
S''
sg28
g37
sg29
g7
sg30
g31
sg32
(dp48
sg34
g40
sg35
g41
sg36
S''
sbsS'__init__'
p49
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
p50
(dp51
g4
((dp52
(dp53
tp54
sg17
(dp55
sg19
(S'self'
p56
tp57
sg22
(dp58
g56
(NNNtp59
ssg25
g26
sg27
S''
sg28
g49
sg29
g7
sg30
g31
sg32
(dp60
sg34
g52
sg35
g53
sg36
S''
sbstp61
sg25
g26
sg27
S''
sS'_class_member_info'
p62
(lp63
sg28
g6
sg29
g2
sg30
g31
sS'_base_class_info'
p64
(lp65
sg32
(dp66
sg34
g9
sg35
g10
sg36
S''
sbsS'Bond'
p67
(ihappydoclib.parseinfo.classinfo
ClassInfo
p68
(dp69
g4
((dp70
(dp71
S'dump_info'
p72
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
p73
(dp74
g4
((dp75
(dp76
tp77
sg17
(dp78
sg19
(S'self'
p79
S'atoms'
p80
tp81
sg22
(dp82
g79
(NNNtp83
sg80
(NNNtp84
ssg25
g26
sg27
S''
sg28
g72
sg29
g68
sg30
g31
sg32
(dp85
sg34
g75
sg35
g76
sg36
S''
sbsS'__eq__'
p86
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
p87
(dp88
g4
((dp89
(dp90
tp91
sg17
(dp92
sg19
(S'self'
p93
S'bond'
p94
tp95
sg22
(dp96
g93
(NNNtp97
sg94
(NNNtp98
ssg25
g26
sg27
S''
sg28
g86
sg29
g68
sg30
g31
sg32
(dp99
sg34
g89
sg35
g90
sg36
S''
sbsS'__init__'
p100
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
p101
(dp102
g4
((dp103
(dp104
tp105
sg17
(dp106
sg19
(S'self'
p107
S'matcher'
p108
S'component_number'
p109
S'generator'
p110
tp111
sg22
(dp112
g108
(NNNtp113
sg107
(NNNtp114
sg109
(NNNtp115
sg110
(I1
S'defaultGenerator'
Ntp116
ssg25
g26
sg27
S''
sg28
g100
sg29
g68
sg30
g31
sg32
(dp117
sg34
g103
sg35
g104
sg36
S''
sbstp118
sg25
g26
sg27
S''
sg62
(lp119
sg28
g67
sg29
g2
sg30
g31
sg64
(lp120
sg32
(dp121
sg34
g70
sg35
g71
sg36
S''
sbsS'Atom'
p122
(ihappydoclib.parseinfo.classinfo
ClassInfo
p123
(dp124
g4
((dp125
(dp126
S'dump_info'
p127
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
p128
(dp129
g4
((dp130
(dp131
tp132
sg17
(dp133
sg19
(S'self'
p134
S'bonds'
p135
tp136
sg22
(dp137
g134
(NNNtp138
sg135
(NNNtp139
ssg25
g26
sg27
S''
sg28
g127
sg29
g123
sg30
g31
sg32
(dp140
sg34
g130
sg35
g131
sg36
S''
sbsS'__eq__'
p141
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
p142
(dp143
g4
((dp144
(dp145
tp146
sg17
(dp147
sg19
(S'self'
p148
S'atom'
p149
tp150
sg22
(dp151
g148
(NNNtp152
sg149
(NNNtp153
ssg25
g26
sg27
S''
sg28
g141
sg29
g123
sg30
g31
sg32
(dp154
sg34
g144
sg35
g145
sg36
S''
sbsS'__init__'
p155
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
p156
(dp157
g4
((dp158
(dp159
tp160
sg17
(dp161
sg19
(S'self'
p162
S'matcher'
p163
S'component_number'
p164
S'generator'
p165
tp166
sg22
(dp167
g163
(NNNtp168
sg162
(NNNtp169
sg164
(NNNtp170
sg165
(I1
S'defaultGenerator'
Ntp171
ssg25
g26
sg27
S''
sg28
g155
sg29
g123
sg30
g31
sg32
(dp172
sg34
g158
sg35
g159
sg36
S''
sbstp173
sg25
g26
sg27
S''
sg62
(lp174
sg28
g122
sg29
g2
sg30
g31
sg64
(lp175
sg32
(dp176
sg34
g125
sg35
g126
sg36
S''
sbs(dp177
tp178
sS'_import_info'
p179
(ihappydoclib.parseinfo.imports
ImportInfo
p180
(dp181
S'_named_imports'
p182
(dp183
S'frowns.IdGenerator'
p184
(lp185
S'defaultGenerator'
p186
assS'_straight_imports'
p187
(lp188
S'string'
p189
asbsg25
g26
sg27
S''
sg28
S'Pattern'
p190
sg29
Nsg30
g31
sg32
(dp191
S'include_comments'
p192
I1
sS'cacheFilePrefix'
p193
S'.happydoc.'
p194
sS'useCache'
p195
I1
sS'docStringFormat'
p196
S'StructuredText'
p197
ssg34
g5
sg35
g177
sg36
S''
sbt.
|
class Info(object):
    """Thin client wrapper around a server's ``info`` handler endpoints."""

    def __init__(self, server, handler='info'):
        # Keep references for later endpoint calls.
        self._server = server
        self._handler = handler

    def activityinfo(self):
        """Activity Info.

        POSTs to the ``info/activityinfo`` endpoint and returns the response.
        """
        return self._server.post("info/activityinfo")
|
""" cv_bridge_demo.py - Version 0.1 2011-05-29
A ROS-to-OpenCV node that uses cv_bridge to map a ROS image topic and optionally a ROS
depth image topic to the equivalent OpenCV image stream(s).
Created for the Pi Robot Project: http://www.pirobot.org
Copyright (c) 2011 Patrick Goebel. All rights reserved.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details at:
http://www.gnu.org/licenses/gpl.html
"""
import roslib
import rospy
import sys
import cv2
import message_filters
import cv2.cv as cv
from sensor_msgs.msg import Image, CameraInfo
from cv_bridge import CvBridge, CvBridgeError
import numpy as np
class cvBridgeDemo():
    """ROS node mirroring synchronized RGB + depth topics into OpenCV windows.

    Python 2 / legacy cv API. Subscribes to an RGB image and a depth image,
    converts them via cv_bridge, runs a color/depth mask over the RGB frame
    and draws contours on detected blobs.
    """
    def __init__(self):
        def nothing(x):
            # No-op callback placeholder (e.g. for trackbars); currently unused.
            pass
        self.node_name = "cv_bridge_demo"
        rospy.init_node(self.node_name)
        # What we do during shutdown
        rospy.on_shutdown(self.cleanup)
        # Create the OpenCV display window for the RGB image
        self.cv_window_name = self.node_name
        cv.NamedWindow(self.cv_window_name, cv.CV_WINDOW_NORMAL)
        cv.MoveWindow(self.cv_window_name, 25, 75)
        # And one for the depth image
        cv.NamedWindow("Depth Image", cv.CV_WINDOW_NORMAL)
        cv.MoveWindow("Depth Image", 25, 350)
        # And one for the mask/histogram output
        cv.NamedWindow("Histogram", cv.CV_WINDOW_NORMAL)
        cv.MoveWindow("Histogram", 480, 350)
        # Create the cv_bridge object
        self.bridge = CvBridge()
        # Subscribe to the camera image and depth topics and set
        # the appropriate callbacks
        self.RGB_sub = message_filters.Subscriber('/camera/rgb/image_rect_color', Image)
        self.Depth_sub = message_filters.Subscriber('/camera/depth/image', Image)
        # rospy.Subscriber('Scene',String,callbackS)
        # Pair RGB and depth messages by (approximate) timestamp; queue size 1.
        self.ts = message_filters.ApproximateTimeSynchronizer([self.RGB_sub, self.Depth_sub], 1, 1)
        # self.image_sub = rospy.Subscriber("/camera/rgb/image_rect_color", Image, self.image_callback)
        # self.depth_sub = rospy.Subscriber("/camera/depth/image_raw", Image, self.depth_callback)
        self.ts.registerCallback(self.image_callback)
        self.depth = np.zeros((300, 200))
        rospy.loginfo("Waiting for image topics...")

    def image_callback(self, ros_image, depth_image):
        """Synchronized callback: convert, mask, display both images."""
        # Use cv_bridge() to convert the ROS image to OpenCV format
        try:
            frame = self.bridge.imgmsg_to_cv2(ros_image, "bgr8")
            d_image = self.bridge.imgmsg_to_cv2(depth_image, "8UC1")
        except CvBridgeError, e:
            # NOTE(review): on conversion failure `frame`/`d_image` are
            # undefined and the lines below raise NameError — should return.
            print e
        # Convert the image to a Numpy array since most cv2 functions
        # require Numpy arrays.
        frame = np.array(frame, dtype=np.uint8)
        depth_array = np.array(d_image, dtype=np.uint8)
        # Normalize the depth image to fall between 0 (black) and 1 (white)
        cv2.normalize(depth_array, depth_array, 0, 255, cv2.NORM_MINMAX)
        # Process the frame using the process_image() function
        display_image, Map = self.process_image(frame, depth_array)
        # Display the image.
        cv2.imshow(self.node_name, display_image)
        cv2.imshow("Histogram", Map)
        cv2.imshow("Depth Image", depth_array)
        # Process any keyboard commands
        self.keystroke = cv.WaitKey(5)
        if 32 <= self.keystroke and self.keystroke < 128:
            cc = chr(self.keystroke).lower()
            if cc == 'q':
                # The user has press the q key, so exit
                rospy.signal_shutdown("User hit q key to quit.")

    def depth_callback(self, ros_image):
        """Standalone depth callback (unused while the synchronizer is active)."""
        # Use cv_bridge() to convert the ROS image to OpenCV format
        try:
            # The depth image is a single-channel float32 image
            depth_image = self.bridge.imgmsg_to_cv2(ros_image, "8UC1")
        except CvBridgeError, e:
            print e
        # Convert the depth image to a Numpy array since most cv2 functions
        # require Numpy arrays.
        depth_array = np.array(depth_image, dtype=np.uint8)
        # Normalize the depth image to fall between 0 (black) and 1 (white)
        cv2.normalize(depth_array, depth_array, 0, 255, cv2.NORM_MINMAX)
        # Process the depth image
        depth_display_image = self.process_depth_image(depth_array)
        # Display the result
        cv2.imshow("Depth Image", depth_display_image)

    def process_image(self, frame, depth):
        """Build a color/depth mask and draw contours of large blobs.

        Returns (annotated image, single-channel mask). The per-channel
        thresholds implement the classic skin/blob rule shown in the
        commented-out pixel loop below.
        """
        # Convert to greyscale
        image = frame
        # hls = cv2.cvtColor(frame, cv2.COLOR_BGR2HLS)
        # # # define range of blue color in HSV
        # h, l, s = cv2.split(hls)
        # lower_blue = np.array([180])
        # upper_blue = np.array([255])
        # l1 = l
        # l1 = l1-30
        # # # Threshold the HSV image to get only blue colors
        # mask = cv2.inRange(l, lower_blue, upper_blue)
        # l = np.where(mask==255,l1,l)
        # image = cv2.merge((h, l, s))
        # image = cv2.cvtColor(image,cv2.COLOR_HLS2BGR)
        # Map = np.zeros((image.shape[0],image.shape[1],1),np.uint8)
        # Map_1 = np.ma.masked_less(image[:,:,0],20)
        # Each Map_k is a boolean test replicated to 3 channels: blue > 20
        Map_1 = (image[:, :, 0] > 20).astype(int)
        Map_1 = cv2.merge((Map_1, Map_1, Map_1))
        # Map_2 = np.ma.masked_less(image[:,:,1],40)
        # green > 40
        Map_2 = (image[:, :, 1] > 40).astype(int)
        Map_2 = cv2.merge((Map_2, Map_2, Map_2))
        # Map_3 = np.ma.masked_less(image[:,:,2],95)
        # red > 95
        Map_3 = (image[:, :, 2] > 95).astype(int)
        Map_3 = cv2.merge((Map_3, Map_3, Map_3))
        # Map_4 = np.ma.masked_less(image[:,:,0]-image[:,:,1],0)
        # blue < green
        Map_4 = (image[:, :, 0] < image[:, :, 1]).astype(int)
        Map_4 = cv2.merge((Map_4, Map_4, Map_4))
        # Map_5 = np.ma.masked_less(image[:,:,1]-image[:,:,2],15)
        # red exceeds green by more than 15
        Map_5 = (image[:, :, 2] - image[:, :, 1] > 15).astype(int)
        Map_5 = cv2.merge((Map_5, Map_5, Map_5))
        # red > green
        Map_6 = (image[:, :, 2] > image[:, :, 1]).astype(int)
        Map_6 = cv2.merge((Map_6, Map_6, Map_6))
        # near depth — NOTE(review): indexes depth[:,:,0] although depth_array
        # from an 8UC1 conversion is presumably 2-D; confirm against the driver.
        Map_7 = (depth[:, :, 0] < 50).astype(int)
        Map_7 = cv2.merge((Map_7, Map_7, Map_7))
        # for i in range(image.shape[0]):
        #     for j in range(image.shape[1]):
        #         r = int(image[i,j,2])
        #         g = int(image[i,j,1])
        #         b = int(image[i,j,0])
        #         # d = int(self.depth[i,j,0])
        #         if((abs(r-g) >15) & (r>g) & (g>b) & (r>95) & (g>40) & (b>20)):
        #             Map[i,j,0]=255
        # Setup SimpleBlobDetector parameters.
        zeros = np.zeros(image.shape, image.dtype)
        ones = np.ones(image.shape, image.dtype)
        ones *= 255
        # AND all seven conditions: a pixel stays 255 only if every test passed.
        Map = np.where(Map_1 != 0, ones, zeros)
        Map = np.where(Map_2 == 0, zeros, Map)
        Map = np.where(Map_3 == 0, zeros, Map)
        Map = np.where(Map_4 == 0, zeros, Map)
        Map = np.where(Map_5 == 0, zeros, Map)
        Map = np.where(Map_6 == 0, zeros, Map)
        Map = np.where(Map_7 == 0, zeros, Map)
        # Morphological open (erode then dilate) to drop speckle noise.
        element = cv2.getStructuringElement(cv2.MORPH_CROSS, (3, 3))
        eroded = cv2.erode(Map, element)
        temp = cv2.dilate(eroded, element)
        temp = temp[:, :, 0]
        (cnts, _) = cv2.findContours(temp.copy(), cv2.RETR_EXTERNAL,
                                     cv2.CHAIN_APPROX_SIMPLE)
        # loop over the contours
        print cnts.__len__()
        print max(cnts)
        for c in cnts:
            # if the contour is too small, ignore it
            if cv2.contourArea(c) < 10000:
                continue
            # compute the bounding box for the contour, draw it on the frame,
            # and update the text
            (x, y, w, h) = cv2.boundingRect(c)
            hull = cv2.convexHull(c)
            ellipse = cv2.fitEllipse(c)
            cv2.ellipse(image, ellipse, (0, 255, 0), 2)
            cv2.rectangle(image, (x, y), (x + w, y + h), (0, 255, 0), 2)
            cv2.drawContours(image, [hull], 0, (0, 0, 255), 2)
        # th2 = cv2.adaptiveThreshold(frame[:,:,0],255,cv2.ADAPTIVE_THRESH_MEAN_C,cv2.THRESH_BINARY,9,1)
        # th3 = cv2.adaptiveThreshold(img,255,cv2.ADAPTIVE_THRESH_GAUSSIAN_C,cv2.THRESH_BINARY,11,2)
        return image, temp

    def process_depth_image(self, frame):
        # Just return the raw image for this demo
        self.depth = frame
        return frame

    def cleanup(self):
        # Shutdown hook: close every OpenCV window.
        print "Shutting down vision node."
        cv2.destroyAllWindows()
def main(args):
    """Start the demo node and spin until shutdown or Ctrl-C."""
    try:
        cvBridgeDemo()
        rospy.spin()
    except KeyboardInterrupt:
        print "Shutting down vision node."
        cv.DestroyAllWindows()

if __name__ == '__main__':
    main(sys.argv)
|
import re
class DialplanExecutionAnalyzer(object):
    """Correlates a parsed dialplan with a parsed execution log."""

    def analyze(self, dialplan_parse_result, log_parse_result):
        """Return an _Analysis of which dialplan lines were executed."""
        analyses = self._do_lines_analyses(dialplan_parse_result, log_parse_result)
        return _Analysis(dialplan_parse_result.filename, analyses)

    def _do_lines_analyses(self, dialplan_parse_result, log_parse_result):
        # One _LineAnalysis per dialplan line, preserving file order.
        return [
            _LineAnalysis(
                line.content,
                line.is_executable,
                self._is_line_executed(line, log_parse_result, dialplan_parse_result),
            )
            for line in dialplan_parse_result.lines
        ]

    def _is_line_executed(self, line, log_parse_result, dialplan_parse_result):
        if not line.is_executable:
            return False
        if not line.extension.startswith('_'):
            # Plain extension: direct lookup in the log.
            return log_parse_result.is_executed(line.context, line.extension, line.priority)
        # Pattern extension ('_' prefix): try every logged extension that has
        # no literal dialplan entry of its own and matches the pattern.
        pattern = line.extension[1:]
        for extension in log_parse_result.list_executed_extensions(line.context, line.priority):
            if not dialplan_parse_result.has_extension(line.context, extension) and \
                    _is_extension_match_pattern(extension, pattern):
                return log_parse_result.is_executed(line.context, extension, line.priority)
        return False
def _is_extension_match_pattern(extension, pattern):
    """Return True if *extension* matches the Asterisk *pattern* (without its '_' prefix)."""
    regex_pattern = _convert_ast_pattern_to_regex_pattern(pattern)
    # re.match returns a Match object or None; convert to a plain bool
    # instead of the original if/else returning True/False.
    return re.match(regex_pattern, extension) is not None
def _convert_ast_pattern_to_regex_pattern(ast_pattern):
regex_pattern_list = ['^']
index = 0
length = len(ast_pattern)
while index < length:
cur_char = ast_pattern[index]
if cur_char == 'X':
regex_pattern_list.append('[0-9]')
elif cur_char == 'Z':
regex_pattern_list.append('[1-9]')
elif cur_char == 'N':
regex_pattern_list.append('[2-9]')
elif cur_char == '[':
close_index = ast_pattern.find(']', index)
regex_pattern_list.append('[{}]'.format(ast_pattern[index:close_index]))
index += close_index
elif cur_char == '.':
regex_pattern_list.append('.+')
break
elif cur_char == '!':
regex_pattern_list.append('.*')
break
else:
regex_pattern_list.append(re.escape(cur_char))
index += 1
regex_pattern_list.append('$')
return ''.join(regex_pattern_list)
class _Analysis(object):
def __init__(self, filename, line_analyses):
self.filename = filename
self.line_analyses = line_analyses
class _LineAnalysis(object):
def __init__(self, content, is_executable, is_executed):
self.content = content
self.is_executable = is_executable
self.is_executed = is_executed
|
from __future__ import unicode_literals
__author__ = "mozman <mozman@gmx.at>"
import warnings
from ..legacy.layouts import DXF12Layout, DXF12BlockLayout
from ..lldxf.classifiedtags import ClassifiedTags
PAPER_SPACE = '*Paper_Space'
TMP_PAPER_SPACE_NAME = '*Paper_Space999999'
class Layouts(object):
    """Manages all Layout() objects of a drawing, keyed by layout name."""

    def __init__(self, drawing):
        self.drawing = drawing
        self._layouts = {}  # stores Layout() objects
        self._dxf_layout_management_table = None  # stores DXF layout handles key=layout_name; value=layout_handle
        self._setup()

    @property
    def dxffactory(self):
        # Convenience accessor for the drawing's entity factory.
        return self.drawing.dxffactory

    def _setup(self):
        # Build the name -> Layout mapping from the ACAD_LAYOUT dictionary.
        layout_table_handle = self.drawing.rootdict['ACAD_LAYOUT']
        self._dxf_layout_management_table = self.dxffactory.wrap_handle(layout_table_handle)
        # name ... layout name
        # handle ... handle to DXF object Layout
        for name, handle in self._dxf_layout_management_table.items():
            layout = Layout(self.drawing, handle)
            self._layouts[name] = layout

    def link_block_entities_into_layouts(self):
        # layout entity spaces are always linked to the block definition
        layout_spaces = self.drawing.entities.get_entity_space()
        for layout in self:
            if not layout.is_active():
                # copy block entity space to layout entity space
                layout_spaces.set_entity_space(layout.layout_key, layout.block.get_entity_space())
                # now the block entity space and layout entity space references the same EntitySpace() object

    def __contains__(self, name):
        return name in self._layouts

    def __iter__(self):
        return iter(self._layouts.values())

    def modelspace(self):
        """Return the model space layout (reserved name 'Model')."""
        return self.get('Model')

    def names(self):
        """Return the names of all layouts."""
        return self._layouts.keys()

    def get(self, name):
        """Return layout *name*; for ``None`` return the first paper space layout.

        Raises KeyError if the layout does not exist.
        """
        if name is None:
            # NOTE(review): index [1] presumably skips the model space entry at
            # taborder position 0 — confirm against names_in_taborder() output.
            first_layout_name = self.names_in_taborder()[1]
            return self._layouts[first_layout_name]
        else:
            return self._layouts[name]

    def names_in_taborder(self):
        # Layout names sorted by their taborder attribute.
        names = []
        for name, layout in self._layouts.items():
            names.append((layout.taborder, name))
        return [name for order, name in sorted(names)]

    def get_layout_for_entity(self, entity):
        """Return the layout that owns *entity* (owner handle == layout key)."""
        return self.get_layout_by_key(entity.dxf.owner)

    def get_layout_by_key(self, layout_key):
        """Return the layout whose BLOCK_RECORD handle equals *layout_key*."""
        for layout in self._layouts.values():
            if layout_key == layout.layout_key:
                return layout
        raise KeyError("Layout with key '{}' does not exist.".format(layout_key))

    def create(self, name, dxfattribs=None):  # TODO remove deprecated interface
        # Deprecated alias for new().
        warnings.warn("Layouts.create() is deprecated use Layout.new() instead.", DeprecationWarning)
        self.new(name, dxfattribs)

    def new(self, name, dxfattribs=None):
        """ Create a new Layout.

        Creates the LAYOUT entity in the OBJECTS section, an associated layout
        block, links both, and registers the result in the management tables.
        Raises ValueError if *name* already exists.
        """
        if dxfattribs is None:
            dxfattribs = {}
        if name in self._layouts:
            raise ValueError("Layout '{}' already exists".format(name))

        def create_dxf_layout_entity():
            # Build the LAYOUT entity; closes over name/dxfattribs/block_record_handle.
            dxfattribs['name'] = name
            dxfattribs['owner'] = self._dxf_layout_management_table.dxf.handle
            dxfattribs.setdefault('taborder', len(self._layouts) + 1)
            dxfattribs['block_record'] = block_record_handle
            entity = self.drawing.objects.create_new_dxf_entity('LAYOUT', dxfattribs)
            return entity.dxf.handle

        block_layout = self.drawing.blocks.new_layout_block()
        block_record_handle = block_layout.block_record_handle
        block_record = block_layout.block_record
        layout_handle = create_dxf_layout_entity()
        block_record.dxf.layout = layout_handle
        # set block entity space as layout entity space
        self.drawing.entities.set_layout_space(layout_handle, block_layout.get_entity_space())
        # create valid layout entity
        layout = Layout(self.drawing, layout_handle)
        # add layout to management tables
        self._dxf_layout_management_table[name] = layout_handle
        self._layouts[name] = layout
        return layout

    def set_active_layout(self, name):
        """Make layout *name* the active paper space layout."""
        if name == 'Model':  # reserved layout name
            raise ValueError("Can not set model space as active layout")
        new_active_layout = self.get(name)  # raises KeyError if no layout 'name' exists
        old_active_layout_key = self.drawing.get_active_layout_key()
        if old_active_layout_key == new_active_layout.layout_key:
            return  # layout 'name' is already the active layout
        blocks = self.drawing.blocks
        new_active_paper_space_name = new_active_layout.block_record_name
        # Three-way rename swaps the reserved '*Paper_Space' block name onto the
        # new active layout via a temporary name.
        blocks.rename_block(PAPER_SPACE, TMP_PAPER_SPACE_NAME)
        blocks.rename_block(new_active_paper_space_name, PAPER_SPACE)
        blocks.rename_block(TMP_PAPER_SPACE_NAME, new_active_paper_space_name)
        # Layout spaces stored by layout key, no exchange necessary

    def delete(self, name):
        """ Delete layout *name* and all entities on it. Raises *KeyError* if layout *name* not exists.
        Raises *ValueError* for deleting model space.
        """
        if name == 'Model':
            raise ValueError("can not delete model space layout")
        layout = self._layouts[name]
        if layout.layout_key == self.drawing.get_active_layout_key():  # name is the active layout
            for layout_name in self.names():
                if layout_name not in (name, 'Model'):  # set any other layout as active layout
                    self.set_active_layout(layout_name)
                    break
        self._dxf_layout_management_table.remove(layout.name)
        del self._layouts[layout.name]
        layout.destroy()
class Layout(DXF12Layout):
    """ Layout representation
    Every layout consist of a LAYOUT entity in the OBJECTS section, an associated BLOCK in the BLOCKS section and a
    BLOCK_RECORD_TABLE entry.
    layout_key: handle of the BLOCK_RECORD, every layout entity has this handle as owner attribute (entity.dxf.owner)
    There are 3 different layout types:
    1. Model Space - not deletable, all entities of this layout are stored in the DXF file in the ENTITIES section, the
    associated '*Model_Space' block is empty, block name '*Model_Space' is mandatory, the layout name is 'Model' and it
    is mandatory.
    2. Active Layout - all entities of this layout are stored in the DXF file also in the ENTITIES section, the
    associated '*Paper_Space' block is empty, block name '*Paper_Space' is mandatory and also marks the active
    layout, the layout name can be an arbitrary string.
    3. Inactive Layout - all entities of this layouts are stored in the DXF file in the associated BLOCK
    called '*Paper_SpaceN', where N is an arbitrary number, I don't know if the block name schema '*Paper_SpaceN' is
    mandatory, the layout name can be an arbitrary string.
    There is no different handling for active layouts and inactive layouts in ezdxf, this differentiation is just
    for AutoCAD important and it is not described in the DXF standard.
    Internal Structure:
    For EVERY layout exists a BlockLayout() object in the blocks section and an EntitySpace() object in the entities
    sections. the block layout entity section and the layout entity section are the SAME object.
    See Layouts.create() line after comment 'set block entity space as layout entity space'.
    ALL layouts entity spaces (also Model Space) are managed in a LayoutSpaces() object in the EntitySection() object.
    Which allows full access to all entities on all layouts at every time.
    """
    def __init__(self, drawing, layout_handle):
        dxffactory = drawing.dxffactory
        # Wrapped LAYOUT entity from the OBJECTS section.
        self.dxf_layout = dxffactory.wrap_handle(layout_handle)
        self._block_record_handle = self.dxf_layout.dxf.block_record
        entitities_section = drawing.sections.entities
        layout_space = entitities_section.get_layout_space(self.layout_key)
        super(Layout, self).__init__(layout_space, dxffactory, 0)
        self._layout_handle = layout_handle
        # paperspace flag written onto entities: 0 for model space, 1 otherwise
        self._paperspace = 0 if self.name == 'Model' else 1

    # start of public interface

    def __contains__(self, entity):
        """True if *entity* (wrapper or handle string) belongs to this layout."""
        if not hasattr(entity, 'dxf'):  # entity is a handle and not a wrapper class
            entity = self.get_entity_by_handle(entity)
        return True if entity.dxf.owner == self.layout_key else False

    @property
    def dxf(self):
        # DXF namespace of the underlying LAYOUT entity.
        return self.dxf_layout.dxf

    # end of public interface

    @property
    def layout_key(self):
        # Handle of the associated BLOCK_RECORD; owner key of all layout entities.
        return self._block_record_handle

    @property
    def block_record(self):
        # Wrapped BLOCK_RECORD entity.
        return self.drawing.dxffactory.wrap_handle(self._block_record_handle)

    @property
    def block_record_name(self):
        # Name of the associated block (e.g. '*Paper_Space').
        return self.block_record.dxf.name

    @property
    def block(self):
        # The BlockLayout() in the blocks section backing this layout.
        return self.drawing.blocks.get(self.block_record_name)

    @property
    def name(self):
        # Layout name as stored on the LAYOUT entity.
        return self.dxf_layout.dxf.name

    @property
    def taborder(self):
        # Tab position of this layout in the CAD UI.
        return self.dxf_layout.dxf.taborder

    def is_active(self):
        """True for model space and the active paper space layout."""
        return self.block_record_name in ('*Model_Space', '*Paper_Space')

    def _set_paperspace(self, entity):
        # Tag an entity as belonging to this layout.
        entity.dxf.paperspace = self._paperspace
        entity.dxf.owner = self.layout_key

    def destroy(self):
        """Delete all entities, the backing block, and the LAYOUT entity itself."""
        self.delete_all_entities()
        self.drawing.blocks.delete_block(self.block.name)
        self.drawing.objects.remove_handle(self._layout_handle)
        self.drawing.entitydb.delete_handle(self._layout_handle)
class BlockLayout(DXF12BlockLayout):
    """Block layout with BLOCK_RECORD support (modern DXF versions)."""

    def add_entity(self, entity):
        """ Add entity to the block entity space.
        """
        # entity can be ClassifiedTags() or a GraphicEntity() or inherited wrapper class
        if isinstance(entity, ClassifiedTags):
            entity = self._dxffactory.wrap_entity(entity)
        # Entities owned by a block carry the BLOCK_RECORD handle as owner.
        entity.dxf.owner = self.block_record_handle
        self._entity_space.append(entity.dxf.handle)

    @property
    def block_record_handle(self):
        # Owner of the BLOCK entity is its BLOCK_RECORD.
        return self.block.dxf.owner

    def set_block_record_handle(self, block_record_handle):
        # Both BLOCK and ENDBLK entities point at the same BLOCK_RECORD.
        self.block.dxf.owner = block_record_handle
        self.endblk.dxf.owner = block_record_handle

    @property
    def block_record(self):
        # Wrapped BLOCK_RECORD entity.
        return self.drawing.dxffactory.wrap_handle(self.block_record_handle)

    def get_entity_space(self):
        return self._entity_space

    def set_entity_space(self, entity_space):
        # Replace the backing EntitySpace (used to share it with a Layout).
        self._entity_space = entity_space

    def destroy(self):
        """Remove the BLOCK_RECORD table entry, then destroy the block itself."""
        self.drawing.sections.tables.block_records.remove_handle(self.block_record_handle)
        super(BlockLayout, self).destroy()
|
'''
Created on 8. juli 2014
@author: perroe
'''
from PyQt4.QtGui import QFormLayout, QToolButton
from ert_gui.ide.keywords.definitions import RangeStringArgument, \
ProperNameFormatStringArgument
from ert_gui.models.connectors import EnsembleSizeModel
from ert_gui.models.connectors.init import CaseSelectorModel
from ert_gui.models.connectors.run import SensitivityStudy, \
ActiveRealizationsModel, RunPathModel, SensitivityTargetCaseFormatModel
from ert_gui.simulation import SensitivityStudyParametersPanel
from ert_gui.simulation.simulation_config_panel import SimulationConfigPanel
from ert_gui.widgets import util
from ert_gui.widgets.active_label import ActiveLabel
from ert_gui.widgets.closable_dialog import ClosableDialog
from ert_gui.widgets.combo_choice import ComboChoice
from ert_gui.widgets.string_box import StringBox
class SensitivityStudyPanel(SimulationConfigPanel):
    '''
    Panel for setting parameters for sensitivity study.
    '''

    def __init__(self):
        '''
        Fills in the input panel for sensitivity study parameters.
        '''
        SimulationConfigPanel.__init__(self, SensitivityStudy())
        layout = QFormLayout()

        # Case selection combo box.
        case_model = CaseSelectorModel()
        case_selector = ComboChoice(case_model, "Current case", "init/current_case_selection")
        layout.addRow(case_selector.getLabel(), case_selector)

        # Read-only display of the configured runpath.
        runpath_model = RunPathModel()
        runpath_label = ActiveLabel(runpath_model, "Runpath", "config/simulation/runpath")
        layout.addRow(runpath_label.getLabel(), runpath_label)

        # Read-only display of the ensemble size.
        number_of_realizations_model = EnsembleSizeModel()
        number_of_realizations_label = ActiveLabel(number_of_realizations_model, "Number of realizations", "config/ensemble/num_realizations")
        layout.addRow(number_of_realizations_label.getLabel(), number_of_realizations_label)

        # Target case name format, validated as a proper name format string.
        sensitivity_target_case_format_model = SensitivityTargetCaseFormatModel()
        self.iterated_target_case_format_field = StringBox(sensitivity_target_case_format_model, "Target case format",
                                                           "config/simulation/sensitivity_target_case_format")
        self.iterated_target_case_format_field.setValidator(ProperNameFormatStringArgument())
        layout.addRow(self.iterated_target_case_format_field.getLabel(), self.iterated_target_case_format_field)

        # Small cog button opening the study parameters popup.
        self.parameters_popup_button = QToolButton()
        self.parameters_popup_button.setIcon(util.resourceIcon("ide/small/cog_edit.png"))
        self.parameters_popup_button.clicked.connect(self.showParametersPopup)
        self.parameters_popup_button.setMaximumSize(20, 20)
        layout.addRow("Parameters:", self.parameters_popup_button)

        # Active realizations range, validated against the ensemble size;
        # re-validates the whole panel on change.
        active_realizations_model = ActiveRealizationsModel()
        self.active_realizations_field = StringBox(active_realizations_model, "Active realizations", "config/simulation/active_realizations")
        self.active_realizations_field.setValidator(RangeStringArgument(number_of_realizations_model.getValue()))
        layout.addRow(self.active_realizations_field.getLabel(), self.active_realizations_field)
        self.active_realizations_field.validationChanged.connect(self.simulationConfigurationChanged)

        self.setLayout(layout)

    def isConfigurationValid(self):
        '''
        Check if the given input configuration is valid, and that all needed
        data is given.
        '''
        return self.active_realizations_field.isValid()

    def showParametersPopup(self):
        # Open a modal dialog with the sensitivity study parameter panel.
        parameter_panel = SensitivityStudyParametersPanel()
        dialog = ClosableDialog("Sensitivity Study Parameters", parameter_panel, self.parent())
        dialog.exec_()

    def toggleAdvancedOptions(self, show_advanced):
        # Advanced mode exposes the active-realizations selector and its label.
        self.active_realizations_field.setVisible(show_advanced)
        self.layout().labelForField(self.active_realizations_field).setVisible(show_advanced)
|
from .grammar import flags
from .tokex_class import Tokex
from .tokenizers.tokenizer import TokexTokenizer
def compile(input_grammar,
            allow_sub_grammar_definitions=True,
            tokenizer=TokexTokenizer,
            default_flags=flags.DEFAULTS):
    """Build and return a reusable parser for *input_grammar*.

    The returned Tokex instance exposes a ``match`` function that can parse
    input strings repeatedly without re-compiling the grammar.

    :param input_grammar: The grammar used to parse input strings.
    :param allow_sub_grammar_definitions: Whether sub grammar declarations
        (@name: grammar @@) are processed. Set to False for grammars from
        untrusted sources to mitigate a `Billion Laughs` attack.
    :param tokenizer: An instance or class of
        tokenizers.Tokenizer.TokexTokenizer (or subclass) used to tokenize
        input strings. Defaults to the base TokexTokenizer.
    :param default_flags: Flags applied to all grammar elements by default;
        individual elements may override them with opposing flags.
    :return: A Tokex parser for repeated use.
    """
    parser = Tokex(
        input_grammar,
        allow_sub_grammar_definitions,
        tokenizer,
        default_flags=default_flags,
    )
    return parser
def match(input_grammar,
          input_string,
          match_entirety=True,
          allow_sub_grammar_definitions=True,
          tokenizer=TokexTokenizer,
          default_flags=flags.DEFAULTS,
          debug=False):
    """One-shot convenience: compile *input_grammar* and match *input_string*.

    :param input_grammar: The grammar used to parse the input string.
    :param input_string: The string to parse.
    :param match_entirety: When True the whole string must be consumed by the
        grammar; when False trailing unmatched tokens do not fail the match.
    :param allow_sub_grammar_definitions: Whether sub grammar declarations
        (@name: grammar @@) are processed. Set to False for grammars from
        untrusted sources to mitigate a `Billion Laughs` attack.
    :param tokenizer: An instance or class of
        tokenizers.Tokenizer.TokexTokenizer (or subclass) used to tokenize
        the input string. Defaults to the base TokexTokenizer.
    :param default_flags: Flags applied to all grammar elements by default;
        individual elements may override them with opposing flags.
    :param debug: When True, raises the logging level to DEBUG for the
        duration of the match.
    :return: The match result, or None if *input_string* does not match.
    """
    parser = Tokex(
        input_grammar,
        allow_sub_grammar_definitions,
        tokenizer,
        default_flags=default_flags,
    )
    return parser.match(input_string, match_entirety=match_entirety, debug=debug)
|
import os
from waflib import Utils, Build, Context
from waflib.Configure import conf
@conf
def CHECK_FOR_THIRD_PARTY(conf):
    # True when the source tree bundles a third_party/ directory.
    return os.path.exists(os.path.join(Context.g_module.top, 'third_party'))
Build.BuildContext.CHECK_FOR_THIRD_PARTY = CHECK_FOR_THIRD_PARTY

@conf
def CHECK_POPT(conf):
    # Prefer a system libpopt over the bundled copy.
    return conf.CHECK_BUNDLED_SYSTEM('popt', checkfunctions='poptGetContext', headers='popt.h')
Build.BuildContext.CHECK_POPT = CHECK_POPT

@conf
def CHECK_CMOCKA(conf):
    # System cmocka unit-test framework, via pkg-config.
    return conf.CHECK_BUNDLED_SYSTEM_PKG('cmocka', minversion='1.1.3')
Build.BuildContext.CHECK_CMOCKA = CHECK_CMOCKA

@conf
def CHECK_SOCKET_WRAPPER(conf):
    return conf.CHECK_BUNDLED_SYSTEM_PKG('socket_wrapper', minversion='1.2.5')
Build.BuildContext.CHECK_SOCKET_WRAPPER = CHECK_SOCKET_WRAPPER

@conf
def CHECK_NSS_WRAPPER(conf):
    return conf.CHECK_BUNDLED_SYSTEM_PKG('nss_wrapper', minversion='1.1.11')
Build.BuildContext.CHECK_NSS_WRAPPER = CHECK_NSS_WRAPPER

@conf
def CHECK_RESOLV_WRAPPER(conf):
    return conf.CHECK_BUNDLED_SYSTEM_PKG('resolv_wrapper', minversion='1.1.7')
Build.BuildContext.CHECK_RESOLV_WRAPPER = CHECK_RESOLV_WRAPPER

@conf
def CHECK_UID_WRAPPER(conf):
    return conf.CHECK_BUNDLED_SYSTEM_PKG('uid_wrapper', minversion='1.2.7')
Build.BuildContext.CHECK_UID_WRAPPER = CHECK_UID_WRAPPER

@conf
def CHECK_PAM_WRAPPER(conf):
    return conf.CHECK_BUNDLED_SYSTEM_PKG('pam_wrapper', minversion='1.1.2')
Build.BuildContext.CHECK_PAM_WRAPPER = CHECK_PAM_WRAPPER
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Create the orb.ResourceCriteria model (id, description, order_by)."""
    dependencies = [
        ('orb', '0004_auto_20150420_2152'),
    ]
    operations = [
        migrations.CreateModel(
            name='ResourceCriteria',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('description', models.TextField()),
                # order_by defaults to 0 so unordered rows sort first.
                ('order_by', models.IntegerField(default=0)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
    ]
|
import errno
import logging
import os
from itertools import chain
from time import sleep, time
import re
from dateutil.parser import parse as parse_date
from onemirror.database import OneDriveDatabaseManager
from onemirror.exception import ResyncRequired
logger = logging.getLogger('onemirror')
EPOCH = parse_date('1970-01-01T00:00:00Z')
class OneMirrorUpdate(object):
    """Applies one OneDrive delta feed to the local mirror directory.

    Iterates the delta items, downloading changed files, creating folders
    and deleting removed entries.  When ``full_resync`` is True it also
    removes any local paths that were not mentioned by the delta.
    """
    def __init__(self, mirror, delta, full_resync=False):
        # mirror: owning OneDriveMirror; delta: iterable of OneDrive item dicts.
        self.mirror = mirror
        self.delta = delta
        self.session = self.mirror.client.session
        self.local_dir = local = self.mirror.local_path
        self.full_resync = full_resync
        self.exclude = exclude = self.mirror.exclude  # compiled regex or None
        self.name = {}  # item id -> item name
        self.parent = {}  # item id -> parent item id
        self.root = self.mirror.root_id
        self.path_cache = {self.root: ''}  # item id -> remote-relative path
        self.to_delete = []  # directories to remove after all items are seen
        self.current = current = set()  # local paths not yet seen in the delta
        if full_resync:
            # Snapshot all non-excluded local paths (forward-slash relative);
            # update_item() discards each path it sees, and whatever remains
            # afterwards is local-only and gets deleted in update().
            for path, dirnames, filenames in os.walk(local):
                dir = os.path.relpath(path, local).replace('\\', '/')
                if dir == '.':
                    for name in chain(dirnames, filenames):
                        if exclude is None or not exclude.match(name):
                            current.add(name)
                else:
                    for name in chain(dirnames, filenames):
                        path = '%s/%s' % (dir, name)
                        if exclude is None or not exclude.match(path):
                            current.add(path)
    def update(self):
        """Apply every delta item; returns the number of items processed."""
        items = 0
        for item in self.delta:
            items += 1
            self.update_item(item)
        # Longest paths first so children are removed before their parents.
        self.to_delete.sort(key=len, reverse=True)
        for dir in self.to_delete:
            try:
                os.rmdir(dir)
            except OSError:
                logger.warning('Could not delete non-empty directory: %s', dir)
            else:
                logger.info('Deleted directory: %s', dir)
        if self.full_resync:
            # os.remove on a directory raises EISDIR (EACCES on some
            # platforms, presumably Windows -- confirm); retry with rmdir.
            isdir = (errno.EISDIR, errno.EACCES)
            for item in sorted(self.current, key=len, reverse=True):
                path = self.local_path(item)
                try:
                    os.remove(path)
                except OSError as e:
                    if e.errno not in isdir:
                        raise
                    try:
                        os.rmdir(path)
                    except OSError:
                        logger.warning('Could not delete local-only non-empty directory: %s', item)
                    else:
                        logger.info('Deleted local-only directory: %s', item)
                else:
                    logger.info('Deleted local-only file: %s', item)
        return items
    def get_path(self, id):
        """Resolve an item id to its remote-relative path (memoized)."""
        if id in self.path_cache:
            return self.path_cache[id]
        if self.parent[id] == self.root:
            path = self.name[id]
        else:
            path = '%s/%s' % (self.get_path(self.parent[id]), self.name[id])
        self.path_cache[id] = path
        return path
    def local_path(self, path):
        """Map a remote-relative path to a path under the local mirror dir."""
        return os.path.join(self.local_dir, path)
    def update_item(self, item, EPOCH=EPOCH, EEXIST=errno.EEXIST, ENOENT=errno.ENOENT):
        """Apply one delta item (file/folder create, change or delete).

        EPOCH/EEXIST/ENOENT are bound as default arguments so lookups are
        locals inside this frequently-called method.
        """
        item_id = item['id']
        self.name[item_id] = item['name']
        self.parent[item_id] = item['parentReference']['id']
        path = self.get_path(item_id)
        if self.exclude is not None and self.exclude.match(path):
            logger.info('Ignore file: %s', path)
            return
        if 'deleted' in item:
            local = self.local_path(path)
            if 'file' in item:
                try:
                    os.remove(local)
                except OSError as e:
                    if e.errno != ENOENT:  # already gone is fine
                        raise
                logger.info('Deleted file: %s', path)
            else:
                # Directories are deleted after the pass, deepest first.
                self.to_delete.append(local)
                logger.debug('Queueing for deletion: %s', path)
        elif 'file' in item:
            # Skip the download when mtime (2-decimal precision) and size match.
            mtime = round((parse_date(item['lastModifiedDateTime']) - EPOCH).total_seconds(), 2)
            local = self.local_path(path)
            self.current.discard(path)
            if os.path.exists(local):
                stat = os.stat(local)
                if round(stat.st_mtime, 2) == mtime and item['size'] == stat.st_size:
                    logger.debug('Already up-to-date: %s', path)
                    return
            logger.info('Downloading: %s', path)
            self.download(item['@content.downloadUrl'], local)
            os.utime(local, (mtime, mtime))  # mirror the remote mtime locally
        elif 'folder' in item:
            self.current.discard(path)
            try:
                os.mkdir(self.local_path(path))
            except OSError as e:
                if e.errno != EEXIST:  # an existing directory is fine
                    raise
            else:
                logger.info('Creating directory: %s', path)
        else:
            logger.info('Unchanged: %s', path)
    def download(self, url, path):
        """Stream ``url`` into ``path`` in 128 KiB chunks."""
        response = self.session.get(url, stream=True)
        with open(path, 'wb') as f:
            for chunk in response.iter_content(chunk_size=131072):
                if chunk:
                    f.write(chunk)
class OneDriveMirror(OneDriveDatabaseManager):
    """Continuously mirrors a remote OneDrive folder into a local directory."""
    def __init__(self, local, remote, *args, **kwargs):
        # local: local mirror directory; remote: remote folder path.
        self.local_path = local
        self.remote_path = remote
        self.delta_url = None  # delta resume token; None forces a full fetch
        self.last_full_update = 0
        self.interval = kwargs.pop('interval', 10)  # poll period in seconds
        self.full_update_interval = kwargs.pop('full_update', 3600)
        exclude = kwargs.pop('exclude', None)  # optional exclusion regex
        if exclude is not None:
            self.exclude = re.compile(exclude)
        else:
            self.exclude = None
        super(OneDriveMirror, self).__init__(*args, **kwargs)
    def update_delta(self, url):
        # Callback handed to the delta viewer to persist the next resume URL.
        self.delta_url = url
    def __enter__(self):
        super(OneDriveMirror, self).__enter__()
        metadata = self.client.metadata(self.remote_path)
        if 'error' in metadata:
            raise ValueError('Folder does not exist on OneDrive')
        self.root_id = metadata['id']
        return self
    def update(self):
        """Run one sync pass; returns the number of delta items processed."""
        # Periodically drop the resume token to force a full resync.
        if time() - self.last_full_update > self.full_update_interval:
            self.update_delta(None)
        full_resync = not self.delta_url
        try:
            delta_viewer = self.client.view_delta(self.remote_path, url=self.delta_url)
        except ResyncRequired:
            # Server invalidated our token: restart from scratch.
            # NOTE(review): recursive retry; repeated ResyncRequired would
            # recurse without bound -- presumably rare, confirm.
            self.update_delta(None)
            return self.update()
        delta_viewer.new_delta = self.update_delta
        res = OneMirrorUpdate(self, delta_viewer, full_resync).update()
        if full_resync:
            self.last_full_update = time()
        return res
    def run(self):
        """Poll forever, sleeping `interval` seconds when nothing changed."""
        logger.info('OneMirror start.')
        while True:
            if self.update() == 0:
                sleep(self.interval)
|
"""Custom topology example
Two directly connected switches plus a host for each switch:
host --- switch --- switch --- host
Adding the 'topos' dict with a key/value pair to generate our newly defined
topology enables one to pass in '--topo=mytopo' from the command line.
"""
from mininet.topo import Topo
from mininet.link import TCLink
from mininet.node import CPULimitedHost, RemoteController
from mininet.net import Mininet
from mininet.log import setLogLevel
from argparse import ArgumentParser
from mininet.util import dumpNodeConnections
class PCountTopo(Topo):
    """Two directly connected switches with num_flows hosts on each side."""
    def __init__(self, loss, num_flows):
        super(PCountTopo, self).__init__()
        self.loss = loss
        self.generate(loss, num_flows)
    def generate(self, loss, m):
        """Build switches s(2m+1)/s(2m+2) and hosts h1..h2m."""
        upstream = self.addSwitch('s%s' % (2 * m + 1))
        downstream = self.addSwitch('s%s' % (2 * m + 2))
        self.addLink(upstream, downstream)
        # The first m hosts attach upstream, the remaining m downstream.
        for idx in range(1, 2 * m + 1):
            node = self.addHost('h%s' % idx)
            self.addLink(node, upstream if idx <= m else downstream)
class H6S10(Topo):
    """Fixed 6-host / 10-switch mesh used for PCount experiments."""
    def __init__(self, loss):
        super(H6S10, self).__init__()
        self.loss = loss
        self.generate()
    def generate(self):
        """Instantiate hosts h1-h6 and switches s7-s16, then wire the links."""
        nodes = {}
        for num in range(1, 7):
            nodes['h%d' % num] = self.addHost('h%d' % num)
        for num in range(7, 17):
            nodes['s%d' % num] = self.addSwitch('s%d' % num)
        wiring = [
            ('h1', 's7'), ('h5', 's7'), ('s7', 's13'), ('s7', 's8'),
            ('s13', 's9'), ('s13', 's14'), ('s8', 's9'), ('s8', 's10'),
            ('s14', 's12'), ('s14', 's11'), ('s10', 's11'), ('s10', 's12'),
            ('s12', 's15'), ('s9', 'h2'), ('s11', 'h3'), ('s12', 'h4'),
            ('s15', 'h6'), ('s9', 's16'), ('s16', 's10'),
        ]
        for end_a, end_b in wiring:
            self.addLink(nodes[end_a], nodes[end_b])
class H6S9(Topo):
    """Fixed 6-host / 9-switch mesh (H6S10 without the s16 detour)."""
    def __init__(self, loss):
        super(H6S9, self).__init__()
        self.loss = loss
        self.generate()
    def generate(self):
        """Instantiate hosts h1-h6 and switches s7-s15, then wire the links."""
        nodes = {}
        for num in range(1, 7):
            nodes['h%d' % num] = self.addHost('h%d' % num)
        for num in range(7, 16):
            nodes['s%d' % num] = self.addSwitch('s%d' % num)
        wiring = [
            ('h1', 's7'), ('h5', 's7'), ('s7', 's13'), ('s7', 's8'),
            ('s13', 's9'), ('s13', 's14'), ('s8', 's9'), ('s8', 's10'),
            ('s14', 's12'), ('s14', 's11'), ('s10', 's11'), ('s10', 's12'),
            ('s12', 's15'), ('s9', 'h2'), ('s11', 'h3'), ('s12', 'h4'),
            ('s15', 'h6'),
        ]
        for end_a, end_b in wiring:
            self.addLink(nodes[end_a], nodes[end_b])
class H4S8(Topo):
    """Fixed 4-host / 8-switch mesh."""
    def __init__(self, loss):
        super(H4S8, self).__init__()
        self.loss = loss
        self.generate()
    def generate(self):
        """Instantiate hosts h1-h4 and switches s5-s12, then wire the links."""
        nodes = {}
        for num in range(1, 5):
            nodes['h%d' % num] = self.addHost('h%d' % num)
        for num in range(5, 13):
            nodes['s%d' % num] = self.addSwitch('s%d' % num)
        wiring = [
            ('h1', 's5'), ('s5', 's11'), ('s5', 's6'), ('s11', 's7'),
            ('s11', 's12'), ('s6', 's7'), ('s6', 's8'), ('s12', 's10'),
            ('s12', 's9'), ('s8', 's9'), ('s8', 's10'), ('s7', 'h2'),
            ('s9', 'h3'), ('s10', 'h4'),
        ]
        for end_a, end_b in wiring:
            self.addLink(nodes[end_a], nodes[end_b])
class H9S6(Topo):
    """9 hosts behind 6 switches; two uplinks carry the configured loss."""
    def __init__(self, loss):
        super(H9S6, self).__init__()
        self.loss = loss
        self.generate()
    def generate(self):
        """Instantiate hosts h1-h9 and switches s10-s15, then wire the links."""
        nodes = {}
        for num in range(1, 10):
            nodes['h%d' % num] = self.addHost('h%d' % num)
        for num in range(10, 16):
            nodes['s%d' % num] = self.addSwitch('s%d' % num)
        # (end_a, end_b, lossy): lossy links are created with loss=self.loss.
        wiring = [
            ('h1', 's12', False), ('h2', 's13', False), ('h3', 's10', False),
            ('h4', 's10', False), ('h5', 's14', False), ('h6', 's14', False),
            ('h7', 's15', False), ('h8', 's15', False), ('h9', 's15', False),
            ('s12', 's11', False), ('s13', 's11', True), ('s11', 's10', False),
            ('s14', 's10', True), ('s15', 's10', False),
        ]
        for end_a, end_b, lossy in wiring:
            if lossy:
                self.addLink(nodes[end_a], nodes[end_b], loss=self.loss)
            else:
                self.addLink(nodes[end_a], nodes[end_b])
class H3S4(Topo):
    """3 hosts, 4 switches; the s4-s6 link carries the configured loss."""
    def __init__(self, loss):
        """Create custom topo."""
        Topo.__init__(self)
        self.loss = loss
        # Hosts and switches.
        host_a = self.addHost('h1')
        host_b = self.addHost('h2')
        host_c = self.addHost('h3')
        switch_a = self.addSwitch('s4')
        switch_b = self.addSwitch('s5')
        switch_mid = self.addSwitch('s6')
        switch_right = self.addSwitch('s7')
        # Links (creation order preserved; it determines port numbering).
        self.addLink(host_a, switch_a)  # (h1, s4)
        self.addLink(host_b, switch_b)  # (h2, s5)
        self.addLink(switch_a, switch_mid, loss=self.loss)  # (s4, s6), lossy
        self.addLink(switch_b, switch_mid)  # (s5, s6)
        self.addLink(switch_mid, switch_right)  # (s6, s7)
        self.addLink(switch_right, host_c)  # (s7, h3)
class H2S2(Topo):
    """Minimal h1-s3-s4-h2 chain; loss applies to the switch-to-switch link."""
    def __init__(self, loss):
        """Create custom topo."""
        self.loss = loss
        Topo.__init__(self)
        # Hosts and switches.
        left_host = self.addHost('h1')
        right_host = self.addHost('h2')
        left_switch = self.addSwitch('s3')
        right_switch = self.addSwitch('s4')
        # Links.
        self.addLink(left_host, left_switch)
        self.addLink(left_switch, right_switch, loss=self.loss)
        self.addLink(right_switch, right_host)
class H3S2(Topo):
    """h1 and h2 behind s4, h3 behind s5, with a lossy s4-s5 link."""
    def __init__(self, loss):
        super(H3S2, self).__init__()
        self.loss = loss
        self.generate()
    def generate(self):
        """Create custom topo."""
        sender_a = self.addHost('h1')
        sender_b = self.addHost('h2')
        receiver = self.addHost('h3')
        ingress = self.addSwitch('s4')
        egress = self.addSwitch('s5')
        # Link creation order preserved (it determines port numbering).
        self.addLink(sender_a, ingress)
        self.addLink(sender_b, ingress)
        self.addLink(egress, receiver)
        self.addLink(ingress, egress, loss=self.loss)
class H3S3(Topo):
    """h1,h2 -> s4 -> s5 -> s6 -> h3, with loss on the s4-s5 hop."""
    def __init__(self, loss):
        """Create custom topo."""
        Topo.__init__(self)
        self.loss = loss
        # Hosts and switches.
        src_a = self.addHost('h1')
        src_b = self.addHost('h2')
        dst = self.addHost('h3')
        sw_left = self.addSwitch('s4')
        sw_mid = self.addSwitch('s5')
        sw_right = self.addSwitch('s6')
        # Links.
        self.addLink(src_a, sw_left)
        self.addLink(src_b, sw_left)
        self.addLink(sw_left, sw_mid, loss=self.loss)
        self.addLink(sw_mid, sw_right)
        self.addLink(sw_right, dst)
|
""" Work with PBS script files """
import shlex
import logging
def set_options_from_pbs_script(arg_parser, options, pbs_script):
    """ Add directives in pbs_script to existing options, using arg_parser

    Reads the leading '#PBS ' directive lines of the script (skipping the
    shebang and blank lines, stopping at the first other line), converts
    them to an argv-style list and parses them into `options` in place.

    arg_parser -- an optparse.OptionParser; parse_args mutates `options`
    options    -- existing option values object, updated in place
    pbs_script -- path to the PBS script file
    """
    opt_string = ''
    # Context manager guarantees the file is closed even if parsing raises
    # (the original leaked the handle on any exception before close()).
    with open(pbs_script) as pbs_fh:
        for line in pbs_fh:
            if line.startswith('#!'):
                continue  # skip the shebang
            if not line.strip():
                continue  # skip blank lines
            if not line.startswith('#PBS '):
                break  # directives only appear at the top of the script
            opt_string += line.replace('#PBS', '', 1)
    logging.debug("opt_string extracted from PBS file:\n%s", opt_string)
    argv = shlex.split(opt_string)
    # optparse writes the parsed values into `options` in place.
    options, args = arg_parser.parse_args(argv, options)
|
import time
from django.core.management.base import BaseCommand, CommandError
from tasks.models import TaskSubmission
class Command(BaseCommand):
    """Management command that iterates over all ungraded task submissions."""
    help = 'List all ungraded tasks'
    def handle(self, *args, **options):
        # Submissions whose grading has not finished yet.
        ungraded = TaskSubmission.objects.filter(grading_completed=False)
        total = ungraded.count()
        for position, submission in enumerate(ungraded, start=1):
            self.stdout.write('Regrading submission {} ({}/{}).'.format(submission, position, total))
|
"""Quaternion methods for finding rotation, reflection, and improper rotation matrices."""
def get_rotation_matrix(a,b):
    """Find the rotation matrix that takes a to b.

    Args:
        a, b: 3-vectors (any sequence) of equal Euclidean length.

    Returns:
        A 3x3 numpy array R with R.dot(a) == b (up to floating error).

    Raises:
        ValueError: if the lengths differ, or if the vectors are
            antiparallel (the rotation axis is then undefined).
    """
    # This module defines no imports at all, so bind numpy locally to avoid
    # a NameError on `np` at call time.
    import numpy as np
    a = np.array(a)
    b = np.array(b)
    a2 = np.dot(a, a)
    b2 = np.dot(b, b)
    if not np.isclose(a2, b2):
        raise ValueError("The vectors must be the same length.")
    # Quaternion (x, y, z, w): axis from the cross product, angle folded
    # into the scalar part w = |a||b| + a.b (half-angle construction).
    v = np.cross(a, b)
    w = np.sqrt(a2 * b2) + np.dot(a, b)
    Q = np.hstack([v, w])
    if np.count_nonzero(Q) == 0:
        # Q vanishes only for antiparallel (or degenerate) input vectors.
        if np.allclose(a, b) or np.allclose(-a, b):
            raise ValueError("The vectors provided are parallel and the "
                             "rotation axis is illdefined.")
        raise ValueError("There is something wrong with the provided vectors.")
    Q = Q / np.linalg.norm(Q)
    x, y, z, w = Q
    # Standard rotation matrix of the unit quaternion (w, x, y, z).
    return np.array([[w**2 + x**2 - y**2 - z**2, 2*(x*y - w*z), 2*(x*z + w*y)],
                     [2*(x*y + w*z), w**2 - x**2 + y**2 - z**2, 2*(y*z - w*x)],
                     [2*(x*z - w*y), 2*(y*z + w*x), w**2 - x**2 - y**2 + z**2]])
def get_improper_rotation_matrix(a,b):
    """Find the improper rotation (rotation composed with inversion) that takes a to b.

    Fixes two defects in the original: it called an undefined name ``RQ``
    (the rotation helper in this module is ``get_rotation_matrix``), and
    ``-b`` fails for plain list input, so b is coerced to an array first.
    """
    import numpy as np  # module defines no imports; bind numpy locally
    # Rotate a onto -b, then invert through the origin to land on b.
    R = get_rotation_matrix(a, -np.asarray(b))
    return np.dot(-np.eye(3), R)
def get_reflection_matrix(a,b):
    """Find the reflection matrix that takes a to b.

    Args:
        a, b: 3-vectors (any sequence) of equal Euclidean length.

    Returns:
        A 3x3 symmetric, involutive numpy array M with M.dot(a) == b.
        For a == b the identity (times the sign of a.b) is returned.

    Raises:
        ValueError: if the lengths differ or the vectors are degenerate.
    """
    import numpy as np  # module defines no imports; bind numpy locally
    a = np.array(a)
    b = np.array(b)
    a2 = np.dot(a, a)
    b2 = np.dot(b, b)
    if not np.isclose(a2, b2):
        raise ValueError("The vectors must be the same length.")
    # Normal of the mirror plane that bisects a and b.
    n = (np.dot(b, b) + np.dot(a, b))*a - (np.dot(a, a) + np.dot(a, b))*b
    # Check n for zero BEFORE normalising: the original divided by norm(n)
    # first (with `norm` undefined as well), which made the parallel case a
    # 0/0 NaN matrix instead of ever reaching the branch below.
    if np.count_nonzero(n) == 0:
        if np.allclose(a, b) or np.allclose(-a, b):
            sign = lambda x: x and (1, -1)[x < 0]
            return sign(np.dot(a, b))*np.eye(3, 3)
        raise ValueError("There is something wrong with the provided vectors.")
    Q = n / np.linalg.norm(n)
    x, y, z = Q
    # Householder reflection I - 2*Q*Q^T written out component-wise.
    return np.array([[-x**2 + y**2 + z**2, -2*x*y, -2*x*z],
                     [-2*x*y, x**2 - y**2 + z**2, -2*y*z],
                     [-2*x*z, -2*y*z, x**2 + y**2 - z**2]])
|
from VMWConfigFile import *
from pyVim import connect
from pyVim.connect import SmartConnect, Disconnect
from pyVmomi import vim, vmodl
import atexit
import os
import sys
import ssl
import requests
import argparse
import time
import getpass
requests.packages.urllib3.disable_warnings()
context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
context.verify_mode = ssl.CERT_NONE
def get_obj(content, vimtype, name):
    """Return the first managed object of the given vim types whose name
    matches `name`, searching the whole inventory; None when absent."""
    view = content.viewManager.CreateContainerView(content.rootFolder, vimtype, True)
    for candidate in view.view:
        if candidate.name == name:
            return candidate
    return None
def wait_for_task(task, actionName='job', hideResult=False):
    # """
    # Waits and provides updates on a vSphere task
    # """
    # Poll every 2 seconds until the task leaves the 'running' state.
    while task.info.state == vim.TaskInfo.State.running:
        time.sleep(2)
    if task.info.state == vim.TaskInfo.State.success:
        if task.info.result is not None and not hideResult:
            out = '%s completed successfully, result: %s' % (actionName, task.info.result)
            print out
        else:
            out = '%s completed successfully.' % actionName
            print out
    else:
        out = '%s did not complete successfully: %s' % (actionName, task.info.error)
        # NOTE(review): raising here makes the following print and the final
        # return unreachable on the failure path -- confirm that is intended.
        raise task.info.error
        print out
    # Only reached on success.
    return task.info.result
def add_dvPort_group(si, dv_switch, vlanId, name, bw):
    """Create a 32-port early-binding distributed portgroup on dv_switch.

    vlanId -- VLAN id configured on the portgroup
    bw     -- optional shaping bandwidth; multiplied by 1000*1000, so
              presumably Mbit/s -- confirm. None/0 disables shaping.
    """
    dv_pg_spec = vim.dvs.DistributedVirtualPortgroup.ConfigSpec()
    dv_pg_spec.name = name
    dv_pg_spec.numPorts = 32
    dv_pg_spec.type = vim.dvs.DistributedVirtualPortgroup.PortgroupType.earlyBinding
    policy = vim.dvs.DistributedVirtualPortgroup.PortgroupPolicy()
    policy.shapingOverrideAllowed = True
    dv_pg_spec.policy = policy
    dv_pg_spec.defaultPortConfig = vim.dvs.VmwareDistributedVirtualSwitch.VmwarePortConfigPolicy()
    dv_pg_spec.defaultPortConfig.securityPolicy = vim.dvs.VmwareDistributedVirtualSwitch.SecurityPolicy()
    if bw:
        # The same shaping policy object is applied to both directions.
        inShapePolicy = vim.dvs.DistributedVirtualPort.TrafficShapingPolicy()
        inShapePolicy.enabled = vim.BoolPolicy(value=True)
        inShapePolicy.averageBandwidth = vim.LongPolicy(value=bw * 1000 * 1000)
        inShapePolicy.peakBandwidth = vim.LongPolicy(value=bw * 1000 * 1000)
        dv_pg_spec.defaultPortConfig.inShapingPolicy = inShapePolicy
        dv_pg_spec.defaultPortConfig.outShapingPolicy = inShapePolicy
    dv_pg_spec.defaultPortConfig.vlan = vim.dvs.VmwareDistributedVirtualSwitch.VlanIdSpec()
    dv_pg_spec.defaultPortConfig.vlan.vlanId = vlanId
    # Lock down the portgroup: no promiscuous mode, forged transmits or MAC changes.
    dv_pg_spec.defaultPortConfig.securityPolicy.allowPromiscuous = vim.BoolPolicy(value=False)
    dv_pg_spec.defaultPortConfig.securityPolicy.forgedTransmits = vim.BoolPolicy(value=False)
    dv_pg_spec.defaultPortConfig.vlan.inherited = False
    dv_pg_spec.defaultPortConfig.securityPolicy.macChanges = vim.BoolPolicy(value=False)
    dv_pg_spec.defaultPortConfig.securityPolicy.inherited = False
    task = dv_switch.AddDVPortgroup_Task([dv_pg_spec])
    # NOTE(review): `si` is passed as wait_for_task's actionName parameter,
    # so progress messages print the service instance -- confirm intended.
    wait_for_task(task, si)
def CreateVMWPortGroup(si, vlanId, name, bw, dvs):
    """Create portgroup `name` with `vlanId` on the DVS named `dvs`; returns 0."""
    # print "Connected to VCENTER SERVER !"
    # get Datacenter
    content = si.RetrieveContent()
    # vc_settings comes from the star import of VMWConfigFile at file top.
    datacenter = get_obj(content, [vim.Datacenter], vc_settings["datacenter"])
    # get cluster //ToDo: Needed?
    # cluster = get_obj(content, [vim.ClusterComputeResource], vc_settings["cluster"])
    dv_switch = get_obj(content, [vim.DistributedVirtualSwitch], dvs)
    network_folder = datacenter.networkFolder
    #Add port group to this switch
    add_dvPort_group(si, dv_switch, int(vlanId), name, bw)
    return 0
def main(*args, **kwargs):
    """Connect to vCenter and create one MPLS portgroup per VLAN 1031..1050.

    Returns 1 on vmodl faults or unexpected exceptions; exits the loop on
    the first non-zero CreateVMWPortGroup result.
    """
    parser = argparse.ArgumentParser(description='Create VMW portgroup on DVS')
    parser.add_argument('-u', '--user', help='VC User', required=False)
    parser.add_argument('-p', '--passw', help='VC User Pass', required=False)
    # parser.add_argument('-d', '--dvs', help='DVS Name', required=False)
    args = parser.parse_args()
    # Prompt interactively for any credential not given on the command line.
    if not args.user:
        args.user = raw_input('User: ')
    if not args.passw:
        args.passw = getpass.getpass(prompt='Password:')
    # if not args.dvs:
    # args.dvs = raw_input('DVS: ')
    try:
        si = None
        try:
            # print "Trying to connect to VCENTER SERVER . . ."
            #si = Service Instance of vCenter
            si = connect.SmartConnect(host=vc_settings["vcenter"],
                                      user=args.user,
                                      pwd=args.passw,
                                      port=443,
                                      sslContext=context)
        # NOTE(review): a connection IOError is swallowed, leaving si None,
        # and the script then fails later when si is used -- confirm intended.
        except IOError, e:
            pass
        atexit.register(Disconnect, si)
        print "Connected to vCenter vSphere"
        # range() upper bound is exclusive: VLANs 1031 through 1050.
        vlanIdRange = [1031, 1051]
        for i in range(vlanIdRange[0], vlanIdRange[1]):
            exitCode = CreateVMWPortGroup(si, i, 'DCHornos-NW01-MPLS-VLAN%d' % i, None, 'vdSwitch-DCHornos-Networking-01')
            if exitCode == 0:
                print "--------- PORTGROUP %d --------- \n\n" % i
                print "Vlan-ID: %d" % i
                print "DVS: vdSwitch-DCHornos-Networking-01"
            else:
                break
            time.sleep(2)
    except vmodl.MethodFault, e:
        print "Caught vmodl fault: %s" % e.msg
        return 1
    except Exception, e:
        print "Caught exception: %s" % str(e)
        return 1
if __name__ == '__main__':
exit(main())
|
from PySide import QtCore, QtGui
class Ui_Accueil(object):
    """Designer-style main-menu UI: three stacked buttons (new contract,
    edit contract, new delivery) centred in a fixed-size window.

    Follows the pyside-uic generated-code pattern; prefer editing the .ui
    source if one exists rather than this class.
    """
    def setupUi(self, Accueil):
        """Build the widget tree, layouts and status bar on the main window."""
        Accueil.setObjectName("Accueil")
        # Fixed-size window: min == max == 254x300.
        Accueil.resize(254, 300)
        Accueil.setMinimumSize(QtCore.QSize(254, 300))
        Accueil.setMaximumSize(QtCore.QSize(254, 300))
        self.centralwidget = QtGui.QWidget(Accueil)
        self.centralwidget.setObjectName("centralwidget")
        self.verticalLayout_2 = QtGui.QVBoxLayout(self.centralwidget)
        self.verticalLayout_2.setObjectName("verticalLayout_2")
        self.horizontalLayout = QtGui.QHBoxLayout()
        self.horizontalLayout.setObjectName("horizontalLayout")
        # Expanding spacers on both sides keep the button column centred.
        spacerItem = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
        self.horizontalLayout.addItem(spacerItem)
        spacerItem1 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
        self.horizontalLayout.addItem(spacerItem1)
        self.verticalLayout = QtGui.QVBoxLayout()
        self.verticalLayout.setObjectName("verticalLayout")
        # "Nouveau contrat" button.
        self.b_new_ctr = QtGui.QPushButton(self.centralwidget)
        self.b_new_ctr.setMinimumSize(QtCore.QSize(200, 50))
        self.b_new_ctr.setObjectName("b_new_ctr")
        self.verticalLayout.addWidget(self.b_new_ctr)
        # "Modifier un contrat" button.
        self.b_edit_ctr = QtGui.QPushButton(self.centralwidget)
        self.b_edit_ctr.setMinimumSize(QtCore.QSize(200, 50))
        self.b_edit_ctr.setObjectName("b_edit_ctr")
        self.verticalLayout.addWidget(self.b_edit_ctr)
        # "Nouvelle livraison" button.
        self.b_livraison = QtGui.QPushButton(self.centralwidget)
        self.b_livraison.setMinimumSize(QtCore.QSize(200, 50))
        self.b_livraison.setObjectName("b_livraison")
        self.verticalLayout.addWidget(self.b_livraison)
        self.horizontalLayout.addLayout(self.verticalLayout)
        spacerItem2 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
        self.horizontalLayout.addItem(spacerItem2)
        spacerItem3 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
        self.horizontalLayout.addItem(spacerItem3)
        self.verticalLayout_2.addLayout(self.horizontalLayout)
        Accueil.setCentralWidget(self.centralwidget)
        self.statusbar = QtGui.QStatusBar(Accueil)
        self.statusbar.setObjectName("statusbar")
        Accueil.setStatusBar(self.statusbar)
        self.retranslateUi(Accueil)
        QtCore.QMetaObject.connectSlotsByName(Accueil)
    def retranslateUi(self, Accueil):
        """Apply translatable UI strings (UTF-8 encoded)."""
        Accueil.setWindowTitle(QtGui.QApplication.translate("Accueil", "MainWindow", None, QtGui.QApplication.UnicodeUTF8))
        self.b_new_ctr.setText(QtGui.QApplication.translate("Accueil", "Nouveau contrat", None, QtGui.QApplication.UnicodeUTF8))
        self.b_edit_ctr.setText(QtGui.QApplication.translate("Accueil", "Modifier un contrat", None, QtGui.QApplication.UnicodeUTF8))
        self.b_livraison.setText(QtGui.QApplication.translate("Accueil", "Nouvelle livraison", None, QtGui.QApplication.UnicodeUTF8))
|
from random import Random
from unittest.mock import MagicMock
import pytest
from randovania.game_description.world.area_identifier import AreaIdentifier
from randovania.game_description.assignment import PickupTarget
from randovania.game_description.hint import Hint, HintType, PrecisionPair, HintLocationPrecision, HintItemPrecision, \
RelativeDataArea, RelativeDataItem
from randovania.game_description.item.item_category import ItemCategory
from randovania.game_description.world.node import LogbookNode
from randovania.game_description.resources.logbook_asset import LogbookAsset
from randovania.game_description.resources.pickup_entry import PickupEntry, PickupModel
from randovania.game_description.resources.pickup_index import PickupIndex
from randovania.games.game import RandovaniaGame
from randovania.generator.filler import runner
from randovania.generator.generator import create_player_pool
@pytest.mark.asyncio
async def test_run_filler(echoes_game_description,
                          default_layout_configuration,
                          mocker,
                          ):
    """run_filler keeps the action log, fills every logbook hint and
    conserves the total pickup count."""
    # Setup
    rng = Random(5000)
    status_update = MagicMock()
    logbook_nodes = [node for node in echoes_game_description.world_list.all_nodes if isinstance(node, LogbookNode)]
    player_pools = {
        0: create_player_pool(rng, default_layout_configuration, 0, 1),
    }
    initial_pickup_count = len(player_pools[0].pickups)
    patches = echoes_game_description.create_game_patches()
    # Pre-assign one hint so the filler has to leave it alone.
    patches = patches.assign_hint(
        logbook_nodes[0].resource(), Hint(HintType.LOCATION, None, PickupIndex(0))
    )
    action_log = (MagicMock(), MagicMock())
    player_state = MagicMock()
    player_state.index = 0
    player_state.game = player_pools[0].game
    player_state.pickups_left = runner._split_expansions(player_pools[0].pickups)[0]
    player_state.scan_asset_initial_pickups = {}
    # Stub the expensive retcon filler with a canned result.
    mocker.patch("randovania.generator.filler.runner.retcon_playthrough_filler", autospec=True,
                 return_value=({player_state: patches}, action_log))
    # Run
    filler_result = await runner.run_filler(rng, player_pools, status_update)
    assert filler_result.action_log == action_log
    assert len(filler_result.player_results) == 1
    result_patches = filler_result.player_results[0].patches
    remaining_items = filler_result.player_results[0].unassigned_pickups
    # Assert
    assert len(result_patches.hints) == len(logbook_nodes)
    assert [hint for hint in result_patches.hints.values()
            if hint.precision is None] == []
    # Nothing lost: assigned + unassigned equals the initial pool size.
    assert initial_pickup_count == len(remaining_items) + len(result_patches.pickup_assignment.values())
def test_fill_unassigned_hints_empty_assignment(echoes_game_description):
    """With no hints pre-assigned, every logbook node should receive one."""
    # Setup
    rng = Random(5000)
    base_patches = echoes_game_description.create_game_patches()
    expected_logbooks = sum(1 for node in echoes_game_description.world_list.all_nodes
                            if isinstance(node, LogbookNode))
    # Run
    result = runner.fill_unassigned_hints(base_patches,
                                          echoes_game_description.world_list,
                                          rng, {})
    # Assert
    assert len(result.hints) == expected_logbooks
def test_add_hints_precision(empty_patches, mocker):
    """Hints with precision are kept as-is; None-precision hints first try
    the relative providers, then fall back to a standard precision pair."""
    # First relative provider fails (returns None), second succeeds.
    failed_relative_provider = MagicMock(return_value=None)
    relative_hint_provider = MagicMock()
    mocker.patch("randovania.generator.filler.runner._get_relative_hint_providers",
                 return_value=[failed_relative_provider, relative_hint_provider])
    player_state = MagicMock()
    rng = MagicMock()
    hints = [
        Hint(HintType.LOCATION, PrecisionPair(HintLocationPrecision.DETAILED,
                                              HintItemPrecision.DETAILED, include_owner=False), PickupIndex(1)),
        Hint(HintType.LOCATION, None, PickupIndex(2)),
        Hint(HintType.LOCATION, None, PickupIndex(3)),
    ]
    initial_patches = empty_patches
    for i, hint in enumerate(hints):
        initial_patches = initial_patches.assign_hint(LogbookAsset(i), hint)
    # Run
    result = runner.add_hints_precision(player_state, initial_patches, rng)
    # Assert
    failed_relative_provider.assert_called_once_with(player_state, initial_patches, rng, PickupIndex(2))
    relative_hint_provider.assert_called_once_with(player_state, initial_patches, rng, PickupIndex(3))
    assert result.hints == {
        LogbookAsset(0): Hint(HintType.LOCATION, PrecisionPair(HintLocationPrecision.DETAILED,
                                                               HintItemPrecision.DETAILED,
                                                               include_owner=False),
                              PickupIndex(1)),
        LogbookAsset(1): Hint(HintType.LOCATION, PrecisionPair(HintLocationPrecision.WORLD_ONLY,
                                                               HintItemPrecision.PRECISE_CATEGORY,
                                                               include_owner=True),
                              PickupIndex(2)),
        LogbookAsset(2): relative_hint_provider.return_value,
    }
def _make_pickup(item_category: ItemCategory):
    """Build a minimal Echoes pickup of the given category for test fixtures."""
    return PickupEntry(
        name="Pickup",
        model=PickupModel(
            game=RandovaniaGame.METROID_PRIME_ECHOES,
            name="EnergyTransferModule",
        ),
        item_category=item_category,
        broad_category=item_category,
        progression=tuple(),
    )
@pytest.mark.parametrize("precise_distance", [False, True])
@pytest.mark.parametrize("location_precision", [HintLocationPrecision.RELATIVE_TO_AREA,
                                                HintLocationPrecision.RELATIVE_TO_INDEX])
def test_add_relative_hint(echoes_game_description, empty_patches, precise_distance, location_precision, echoes_item_database):
    """add_relative_hint should emit a LOCATION hint whose relative data
    matches the requested precision mode and distance handling."""
    # Setup
    rng = Random(5000)
    target_precision = MagicMock(spec=HintItemPrecision)
    precision = MagicMock(spec=HintItemPrecision)
    patches = empty_patches.assign_pickup_assignment({
        PickupIndex(8): PickupTarget(_make_pickup(echoes_item_database.item_categories["movement"]), 0),
    })
    # Expected relative data depends on the precision mode under test; an
    # imprecise distance gets an offset added (3 or 11 respectively).
    if location_precision == HintLocationPrecision.RELATIVE_TO_AREA:
        max_distance = 8
        data = RelativeDataArea(
            None if precise_distance else 3,
            # Was Industrial Site
            AreaIdentifier("Temple Grounds", "Hive Chamber A"),
            precision,
        )
    else:
        max_distance = 20
        data = RelativeDataItem(
            None if precise_distance else 11,
            PickupIndex(8),
            precision,
        )
    # Run
    result = runner.add_relative_hint(echoes_game_description.world_list,
                                      patches,
                                      rng,
                                      PickupIndex(1),
                                      target_precision,
                                      location_precision,
                                      precise_distance,
                                      precision,
                                      max_distance=max_distance)
    # Assert
    pair = PrecisionPair(location_precision, target_precision, include_owner=False, relative=data)
    assert result == Hint(HintType.LOCATION, pair, PickupIndex(1))
|
from __future__ import print_function
import importlib
import inspect
import types
from abjad.tools.abctools.AbjadObject import AbjadObject
class InheritanceGraph(AbjadObject):
r'''Generates a graph of a class or collection of
classes as a dictionary of parent-children relationships:
::
>>> class A(object): pass
...
>>> class B(A): pass
...
>>> class C(B): pass
...
>>> class D(B): pass
...
>>> class E(C, D): pass
...
>>> class F(A): pass
...
::
>>> graph = documentationtools.InheritanceGraph(addresses=(F, E))
``InheritanceGraph`` may be instantiated from one or more instances,
classes or modules. If instantiated from a module, all public classes
in that module will be taken into the graph.
A `root_class` keyword may be defined at instantiation, which filters out
all classes from the graph which do not inherit from that `root_class`
(or are not already the `root_class`):
::
>>> graph = documentationtools.InheritanceGraph(
... (A, B, C, D, E, F),
... root_addresses=(B,),
... )
The class is intended for use in documenting packages.
To document all of Abjad, use this formulation:
::
>>> graph = documentationtools.InheritanceGraph(
... addresses=('abjad',),
... )
To document only those classes descending from Container,
use this formulation:
::
>>> graph = documentationtools.InheritanceGraph(
... addresses=('abjad',),
... root_addresses=(Container,),
... )
To document only those classes whose lineage pass through scoretools,
use this formulation:
::
>>> graph = documentationtools.InheritanceGraph(
... addresses=('abjad',),
... lineage_addresses=(scoretools,),
... )
When creating the Graphviz representation, classes in the inheritance
graph may be hidden, based on their distance from any defined lineage
class:
::
>>> graph = documentationtools.InheritanceGraph(
... addresses=('abjad',),
... lineage_addresses=(instrumenttools.Instrument,),
... lineage_prune_distance=1,
... )
Returns ``InheritanceGraph`` instance.
'''
### CLASS VARIABLES ###
__slots__ = (
'_addresses',
'_child_parents_mapping',
'_immediate_classes',
'_lineage_addresses',
'_lineage_classes',
'_lineage_distance_mapping',
'_lineage_prune_distance',
'_parent_children_mapping',
'_recurse_into_submodules',
'_root_addresses',
'_root_classes',
'_use_clusters',
'_use_groups',
)
### INITIALIZER ###
    def __init__(
        self,
        addresses=('abjad',),
        lineage_addresses=None,
        lineage_prune_distance=None,
        recurse_into_submodules=True,
        root_addresses=None,
        use_clusters=True,
        use_groups=True,
        ):
        self._recurse_into_submodules = bool(recurse_into_submodules)
        if lineage_prune_distance is not None:
            # Pruning distance must be a positive integer when given.
            lineage_prune_distance = int(lineage_prune_distance)
            assert 0 < lineage_prune_distance
        self._lineage_prune_distance = lineage_prune_distance
        self._use_clusters = bool(use_clusters)
        self._use_groups = bool(use_groups)
        # main addresses
        if addresses is None:
            addresses = ('abjad',)
        all_main_classes, main_immediate_classes, main_cached_addresses = \
            self._collect_classes(addresses, self.recurse_into_submodules)
        self._addresses = main_cached_addresses
        # lineage addresses (never recursed into submodules)
        if lineage_addresses is not None:
            all_lineage_classes, lineage_immediate_classes, lineage_cached_addresses = \
                self._collect_classes(lineage_addresses, False)
            self._lineage_addresses = lineage_cached_addresses
            self._lineage_classes = frozenset(all_lineage_classes)
        else:
            self._lineage_addresses = None
            self._lineage_classes = frozenset([])
        # root addresses
        if root_addresses is not None:
            all_root_classes, root_immediate_classes, root_cached_addresses = \
                self._collect_classes(root_addresses, False)
            self._root_addresses = root_cached_addresses
            self._root_classes = frozenset(all_root_classes)
        else:
            # Without explicit roots every class qualifies (all descend
            # from object).
            self._root_addresses = None
            self._root_classes = frozenset([object])
        child_parents_mapping, parent_children_mapping = \
            self._build_basic_mappings(all_main_classes)
        # Restrict graph to lineage-related classes (no-op without lineage).
        self._strip_nonlineage_classes(
            child_parents_mapping, parent_children_mapping)
        self._child_parents_mapping = child_parents_mapping
        self._parent_children_mapping = parent_children_mapping
        # Only immediate classes that survived root filtering / stripping.
        self._immediate_classes = main_immediate_classes.intersection(
            self._parent_children_mapping.keys())
        self._lineage_distance_mapping = self._find_lineage_distances()
### SPECIAL METHODS ###
    def __graph__(self, **kwargs):
        r'''Graphviz graph of inheritance graph.

        First pass creates one node per class grouped into per-package
        clusters; second pass adds parent/child edges; final pass colors
        clusters and optionally flattens them when `use_clusters` is False.
        '''
        from abjad.tools import documentationtools
        class_nodes = {}
        graph = documentationtools.GraphvizGraph(
            name='InheritanceGraph',
            attributes={
                'bgcolor': 'transparent',
                'color': 'lightslategrey',
                'fontname': 'Arial',
                'outputorder': 'edgesfirst',
                'overlap': 'prism',
                'penwidth': 2,
                #'ranksep': 0.5,
                'splines': 'spline',
                'style': ('dotted', 'rounded'),
                'truecolor': True,
                },
            edge_attributes={
                'color': 'lightsteelblue2',
                'penwidth': 2,
                },
            node_attributes={
                'colorscheme': 'pastel19',
                'fontname': 'Arial',
                'fontsize': 12,
                'penwidth': 2,
                'style': ('filled', 'rounded'),
                },
            )
        # First pass: one node per class, clustered by leading name piece.
        for current_class in sorted(self.parent_children_mapping,
            key=lambda x: (x.__module__, x.__name__)):
            pieces = self._get_class_name_pieces(current_class)
            try:
                cluster = graph[pieces[0]]
            except KeyError:
                cluster = documentationtools.GraphvizSubgraph(
                    name=pieces[0],
                    attributes={
                        'label': pieces[0],
                        },
                    )
                graph.append(cluster)
            node = documentationtools.GraphvizNode(
                name='.'.join(pieces),
                )
            node.attributes['label'] = pieces[-1]
            if current_class in self.immediate_classes:
                pass
            if current_class in self.root_classes:
                pass
            # Abstract classes render as bold ovals, concrete ones as boxes.
            if inspect.isabstract(current_class):
                node.attributes['shape'] = 'oval'
                node.attributes['style'] = 'bold'
            else:
                node.attributes['shape'] = 'box'
            # Lineage classes are highlighted white-on-black.
            if current_class in self.lineage_classes:
                node.attributes['color'] = 'black'
                node.attributes['fontcolor'] = 'white'
                node.attributes['style'] = ('filled', 'rounded')
            if self.lineage_prune_distance is None:
                cluster.append(node)
                class_nodes[current_class] = node
            elif current_class not in self.lineage_distance_mapping:
                cluster.append(node)
                class_nodes[current_class] = node
            else:
                # Classes at exactly prune-distance + 1 become invisible
                # spacer nodes; anything farther is dropped entirely.
                ok_distance = self.lineage_prune_distance + 1
                distance = self.lineage_distance_mapping[current_class]
                if distance < ok_distance:
                    cluster.append(node)
                    class_nodes[current_class] = node
                elif distance == ok_distance:
                    node.attributes['shape'] = 'invis'
                    node.attributes['style'] = 'transparent'
                    node.attributes['label'] = ' '
                    cluster.append(node)
                    class_nodes[current_class] = node
                elif ok_distance < distance:
                    pass
        distances = self.lineage_distance_mapping
        # Second pass: connect each parent to its children when both
        # endpoints survived pruning.
        # NOTE(review): `ok_distance` is only bound inside the else branch
        # of the first pass; the comparisons below rely on it having been
        # set there whenever pruning is active -- confirm this invariant.
        for parent in sorted(self.parent_children_mapping,
            key=lambda x: (x.__module__, x.__name__)):
            children = self.parent_children_mapping[parent]
            children = sorted(
                children,
                key=lambda x: (x.__module__, x.__name__),
                )
            for child in children:
                ok_to_join = False
                if self.lineage_prune_distance is None:
                    ok_to_join = True
                elif parent not in distances:
                    if child not in distances:
                        ok_to_join = True
                    elif child in distances and \
                        distances[child] <= ok_distance:
                        ok_to_join = True
                elif child not in distances:
                    if parent not in distances:
                        ok_to_join = True
                    elif parent in distances and \
                        distances[parent] <= ok_distance:
                        ok_to_join = True
                elif distances[child] <= ok_distance and \
                    distances[parent] <= ok_distance:
                    ok_to_join = True
                if ok_to_join:
                    parent_node = class_nodes[parent]
                    child_node = class_nodes[child]
                    documentationtools.GraphvizEdge()(parent_node, child_node)
        # Final pass: color each cluster from the pastel19 scheme and,
        # when clusters are disabled, splice their nodes into the root graph.
        for i, cluster in enumerate(
            sorted(graph.children, key=lambda x: x.name)):
            color = i % 9 + 1
            for node in cluster:
                if 'color' not in node.attributes:
                    node.attributes['color'] = color
                if self.use_groups:
                    node.attributes['group'] = i
            if not self.use_clusters:
                graph.extend(cluster[:])
                graph.remove(cluster)
        if self.root_addresses is None:
            graph.attributes['root'] = '__builtin__.object'
        return graph
### PRIVATE METHODS ###
    def _build_basic_mappings(self, classes):
        '''Build child->parents and parent->children mappings for `classes`.

        A class is kept only if its MRO passes through one of
        `self.root_classes`; rejected classes are remembered in
        `invalid_classes` so revisits are O(1).
        '''
        child_parents_mapping = {}
        parent_children_mapping = {}
        invalid_classes = set([])
        def recurse(current_class):
            # Memoized results: already accepted / already rejected.
            if current_class in child_parents_mapping:
                return True
            elif current_class in invalid_classes:
                return False
            # Pop the MRO tail until a root class is found; empty => reject.
            mro = list(inspect.getmro(current_class))
            while len(mro) and mro[-1] not in self.root_classes:
                mro.pop()
            if not mro:
                invalid_classes.add(current_class)
                return False
            # Keep only direct bases that are themselves valid.
            parents = [x for x in current_class.__bases__ if recurse(x)]
            child_parents_mapping[current_class] = set(parents)
            parent_children_mapping[current_class] = set([])
            for parent in parents:
                parent_children_mapping[parent].add(current_class)
            return True
        for current_class in classes:
            recurse(current_class)
        return child_parents_mapping, parent_children_mapping
    def _collect_classes(self, addresses, recurse_into_submodules):
        '''Resolve `addresses` into (all_classes, immediate_classes,
        cached_addresses).

        Each address may be a module or module name (all classes in it are
        collected, optionally recursing into submodules), a class, a
        (module_name, class_name) pair, or an arbitrary instance (its class
        is used).
        '''
        all_classes = set([])
        cached_addresses = []
        immediate_classes = set([])
        visited_modules = set([])
        assert 0 < len(addresses)
        for x in addresses:
            address = None
            if isinstance(x, (str, types.ModuleType)):
                if isinstance(x, types.ModuleType):
                    module = x
                else:
                    try:
                        module = importlib.import_module(x)
                    except ImportError:
                        # Unimportable module names are skipped silently.
                        module = None
                if module is None:
                    continue
                for y in module.__dict__.values():
                    if isinstance(y, type):
                        all_classes.add(y)
                        immediate_classes.add(y)
                    elif isinstance(y, types.ModuleType) and \
                        recurse_into_submodules:
                        # Submodule classes count as collected but not
                        # immediate.
                        all_classes.update(
                            self._submodule_recurse(y, visited_modules))
                address = module.__name__
            else:
                if isinstance(x, type):
                    current_class = x
                elif isinstance(x, tuple) and len(x) == 2:
                    module_name, class_name = x
                    module = importlib.import_module(module_name)
                    current_class = getattr(module, class_name)
                else:
                    # Fall back to treating x as an instance.
                    current_class = x.__class__
                all_classes.add(current_class)
                immediate_classes.add(current_class)
                address = (current_class.__module__, current_class.__name__)
            if address is not None:
                cached_addresses.append(address)
        return all_classes, immediate_classes, tuple(cached_addresses)
    def _find_lineage_distances(self):
        '''Map each class to its minimum downward distance from any lineage
        class; return None when lineage pruning is inactive.'''
        if not self.lineage_classes:
            return None
        if not self.lineage_prune_distance:
            return None
        distance_mapping = {}
        def recurse_downward(current_class, distance=0):
            if current_class not in self.parent_children_mapping:
                return
            for child in self.parent_children_mapping[current_class]:
                if child not in distance_mapping:
                    distance_mapping[child] = distance + 1
                    recurse_downward(child, distance + 1)
                elif (distance + 1) < distance_mapping[child]:
                    # Shorter path found: relax and re-propagate.
                    distance_mapping[child] = distance + 1
                    recurse_downward(child, distance + 1)
        for current_class in self.lineage_classes:
            recurse_downward(current_class)
        return distance_mapping
@staticmethod
def _get_class_name_pieces(current_class):
parts = (
current_class.__module__ + '.' + current_class.__name__
).split('.')
name = [parts[0]]
for part in parts[1:]:
if part != name[-1]:
name.append(part)
if 2 < len(name) and name[1] == 'tools':
#if name[0] in ('abjad', 'experimental'):
return name[2:]
return name
@staticmethod
def _recurse_downward(
current_class,
invalid_classes,
parent_children_mapping,
):
if current_class not in parent_children_mapping:
return
for child in parent_children_mapping[current_class]:
if child in invalid_classes:
invalid_classes.remove(child)
InheritanceGraph._recurse_downward(
child,
invalid_classes,
parent_children_mapping,
)
@staticmethod
def _recurse_upward(
current_class,
invalid_classes,
child_parents_mapping,
):
if current_class not in child_parents_mapping:
return
for parent in child_parents_mapping[current_class]:
if parent in invalid_classes:
invalid_classes.remove(parent)
InheritanceGraph._recurse_upward(
parent,
invalid_classes,
child_parents_mapping,
)
    def _strip_nonlineage_classes(self,
        child_parents_mapping, parent_children_mapping):
        '''Remove from both mappings every class that is neither an
        ancestor nor a descendant of a lineage class.  No-op when no
        lineage classes are set.
        '''
        if not self.lineage_classes:
            return
        # Start with everything invalid; the traversals below whitelist the
        # lineage classes plus anything connected to them.
        invalid_classes = set(
            list(child_parents_mapping.keys()) +
            list(parent_children_mapping.keys())
            )
        for current_class in self.lineage_classes:
            if current_class in invalid_classes:
                invalid_classes.remove(current_class)
            InheritanceGraph._recurse_upward(
                current_class,
                invalid_classes,
                child_parents_mapping,
                )
            InheritanceGraph._recurse_downward(
                current_class,
                invalid_classes,
                parent_children_mapping
                )
        # Unlink each remaining invalid class from both sides, then drop it.
        # (Mutual removal keeps both maps consistent while iterating.)
        for current_class in invalid_classes:
            for child in parent_children_mapping[current_class]:
                child_parents_mapping[child].remove(current_class)
            for parent in child_parents_mapping[current_class]:
                parent_children_mapping[parent].remove(current_class)
            del(parent_children_mapping[current_class])
            del(child_parents_mapping[current_class])
def _submodule_recurse(self, module, visited_modules):
result = []
for obj in list(module.__dict__.values()):
if isinstance(obj, type):
result.append(obj)
elif isinstance(obj, types.ModuleType) and \
obj not in visited_modules:
visited_modules.add(obj)
result.extend(self._submodule_recurse(obj, visited_modules))
return result
### PUBLIC PROPERTIES ###
@property
def addresses(self):
r'''Addresses of inheritance graph.
'''
return self._addresses
@property
def child_parents_mapping(self):
r'''Child / parent mapping of inheritance graph.
'''
return self._child_parents_mapping
@property
def immediate_classes(self):
r'''Immediate classes of inheritance graph.
'''
return self._immediate_classes
@property
def lineage_addresses(self):
r'''Lineage addresses of inheritance graph.
'''
return self._lineage_addresses
@property
def lineage_classes(self):
r'''Lineage classes of inheritance graph.
'''
return self._lineage_classes
@property
def lineage_distance_mapping(self):
r'''Lineage distance mapping of inheritance graph.
'''
return self._lineage_distance_mapping
@property
def lineage_prune_distance(self):
r'''Lineage prune distance of inheritance graph.
'''
return self._lineage_prune_distance
@property
def parent_children_mapping(self):
r'''Parent / children mapping of inheritancee graph.
'''
return self._parent_children_mapping
@property
def recurse_into_submodules(self):
r'''Recurse into submodules.
'''
return self._recurse_into_submodules
@property
def root_addresses(self):
r'''Root addresses of inheritance graph.
'''
return self._root_addresses
@property
def root_classes(self):
r'''Root classes of inheritance graph.
'''
return self._root_classes
@property
def use_clusters(self):
r'''Use clusters.
'''
return self._use_clusters
@property
def use_groups(self):
r'''Use groups.
'''
return self._use_groups
|
import sys
import bottle_session
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from bottlereact import BottleReact
from bottle.ext import sqlalchemy
import bottle
# Production mode is toggled with a command-line flag.
PROD = '--prod' in sys.argv
# NOTE(review): placeholder secret committed to source -- move to env/config.
MAILGUN_API_KEY = "NOTAREALKEY"
Base = declarative_base()
# NOTE(review): hard-coded DB credentials and echo=True (full SQL logging) --
# presumably development-only settings; verify before deploying.
engine = create_engine(
    'postgresql+psycopg2://stefan:test@localhost:5432/slothy_development',
    echo=True)
# Bottle plugin that injects a SQLAlchemy session as the `db` keyword
# argument into route callbacks; creates tables and commits per request.
db_plugin = sqlalchemy.Plugin(
    engine,
    Base.metadata,
    keyword='db',
    create=True,
    commit=True,
    use_kwargs=False
)
Session = sessionmaker(bind=engine)
# `application` alias lets WSGI servers pick up the app by convention.
app = application = bottle.Bottle()
app.install(db_plugin)
# Cookie-backed sessions.
app.install(bottle_session.SessionPlugin(
    cookie_lifetime=31540000)) # 1 year in seconds
br = BottleReact(app, prod=PROD, default_render_html_kwargs={"PROD": PROD})
|
# Add-on metadata consumed by Blender's add-on manager.
bl_info = {
    "name": "MecaFace",
    "author": "Citrine's Animations",
    "version": (1, 0, 4),
    "blender": (2, 80, 0),
    "location": "Side Bar",
    "description": "Easily Add Face Rigs",
    "warning": "",
    "wiki_url": "www.mecabricks.com",
    "category": "Add Mesh",
}
import os
import bpy
from bpy.props import BoolProperty
from bpy.types import PropertyGroup, Panel, Scene
# Directory this add-on lives in; used to locate the bundled .blend assets.
addon_dirc = os.path.dirname(os.path.realpath(__file__))
# NOTE(review): these RNA properties are registered at import time rather
# than inside register()/unregister(); conventional add-ons register and
# remove properties there.
# RGBA colour pickers (clamped to [0, 1]) for each face part.
bpy.types.Scene.test_float = bpy.props.FloatVectorProperty(name = "Base",subtype = "COLOR",size = 4,min = 0.0,max = 1.0,default = (0.000986,0.000986,0.000986,1.0))
bpy.types.Scene.eyes_float = bpy.props.FloatVectorProperty(name = "Base",subtype = "COLOR",size = 4,min = 0.0,max = 1.0,default = (0.000986,0.000986,0.000986,1.0))
bpy.types.Scene.pupil_float = bpy.props.FloatVectorProperty(name = "Pupils",subtype = "COLOR",size = 4,min = 0.0,max = 1.0,default = (0.904661,0.904661,0.904661,1.0))
bpy.types.Scene.omouth_float = bpy.props.FloatVectorProperty(name = "Outline",subtype = "COLOR",size = 4,min = 0.0,max = 1.0,default = (0.000986,0.000986,0.000986,1.0))
bpy.types.Scene.inmouth_float = bpy.props.FloatVectorProperty(name = "Base",subtype = "COLOR",size = 4,min = 0.0,max = 1.0,default = (0.000986,0.000986,0.000986,1.0))
bpy.types.Scene.tong_float = bpy.props.FloatVectorProperty(name = "Tongue",subtype = "COLOR",size = 4,min = 0.0,max = 1.0,default = (0.505879,0.0,0.010592,1.0))
bpy.types.Scene.teeth_float = bpy.props.FloatVectorProperty(name = "Teeth",subtype = "COLOR",size = 4,min = 0.0,max = 1.0,default = (0.904661,0.904661,0.904661,1.0))
bpy.types.Scene.lips_float = bpy.props.FloatVectorProperty(name = "Lips",subtype = "COLOR",size = 4,min = 0.0,max = 1.0,default = (0.439657,0.194618,0.066626,1.0))
# Per-bone line colour, running line count, and visibility frame window.
bpy.types.Bone.line_colour = bpy.props.FloatVectorProperty(name = "Base",subtype = "COLOR",size = 4,min = 0.0,max = 1.0,default = (0.000986,0.000986,0.000986,1.0))
bpy.types.Scene.line_amount = bpy.props.IntProperty(default=0)
bpy.types.Bone.Start = bpy.props.IntProperty(default=0, min=0)
bpy.types.Bone.End = bpy.props.IntProperty(default=1, min=0)
def update_solids(self, context):
    """Toggle the Solidify modifier on every face part, including the most
    recently added line object.

    Update callback driven by a scene bool (``solids_fies``, declared
    elsewhere); flips both viewport and render visibility of the modifier.
    Rewritten from nine copy-pasted per-object blocks into one loop.
    """
    numors = str(bpy.context.scene.line_amount)
    objectss = bpy.data.objects
    show = bool(context.scene.solids_fies)
    for name in ('Mouth', 'Lips', 'Eyelash1', 'Eyelash2',
                 'EyeR', 'EyeL', 'BrowL', 'BrowR',
                 'FinLineMain' + numors):
        solidify = objectss[name].modifiers["Solidify"]
        solidify.show_viewport = show
        solidify.show_render = show
def lipable(self, context):
    """Update callback: match the 'Lips' object's visibility to scene.setvisi."""
    lips = bpy.data.objects['Lips']
    hidden = not context.scene.setvisi
    lips.hide_viewport = hidden
    lips.hide_render = hidden
def lashable(self, context):
    """Update callback: match both eyelash objects' visibility to scene.setlash."""
    hidden = not context.scene.setlash
    for name in ('Eyelash1', 'Eyelash2'):
        lash = bpy.data.objects[name]
        lash.hide_viewport = hidden
        lash.hide_render = hidden
# Visibility toggles; the update callbacks above apply them to the scene.
bpy.types.Object.setbevel = bpy.props.BoolProperty(name="bevel", default=True)
bpy.types.Scene.setvisi = bpy.props.BoolProperty(name="Lips", default=False, update=lipable)
bpy.types.Scene.setlash = bpy.props.BoolProperty(name="Lashes", default=False, update=lashable)
class MainMecaFacePanel:
    """Mixin holding the shared sidebar settings for every MecaFace panel."""
    bl_label = "MecaFace"
    bl_idname = "SCENE_PT_layout"
    bl_category = "MecaFace"
    bl_space_type = 'VIEW_3D'
    bl_region_type = 'UI'
    @classmethod
    def poll(cls, context):
        # Panels only show while some object is active.
        return context.object is not None
class MecaF0(MainMecaFacePanel, bpy.types.Panel):
    """Root MecaFace panel: add-rig button plus lips/lashes visibility toggles.

    (Removed the unused ``obj = context.object`` local.)
    """
    bl_label = "MecaFace"
    bl_idname = "SCENE_PT_layout"
    def draw(self, context):
        layout = self.layout
        row = layout.row(align=True)
        row.operator("do.it", text="Add face rig to selected")
        row = layout.row(align=True)
        row.prop(context.scene,"setvisi")
        row.prop(context.scene,"setlash")
class MecaF1(MainMecaFacePanel, bpy.types.Panel):
    """Parent 'Colour' panel; the per-part colour pickers are its sub-panels.

    (Removed the unused ``layout``/``obj`` locals from the empty draw.)
    """
    bl_idname = "VIEW3D_PT_test_1"
    bl_label = "Colour"
    bl_parent_id = 'SCENE_PT_layout'
    bl_options = {'DEFAULT_CLOSED'}
    def draw(self, context):
        """Intentionally empty: this panel only groups its sub-panels."""
class MecaF2(MainMecaFacePanel, bpy.types.Panel):
    """Eyebrow colour picker with an Apply button (operator do.it2).

    (Removed the unused ``obj = context.object`` local.)
    """
    bl_idname = "browee"
    bl_label = "Eyebrows"
    bl_parent_id = 'VIEW3D_PT_test_1'
    bl_options = {'DEFAULT_CLOSED'}
    def draw(self, context):
        layout = self.layout
        layout.prop(context.scene, "test_float")
        layout.operator("do.it2", text="Apply")
class MecaF3(MainMecaFacePanel, bpy.types.Panel):
    """Eye base and pupil colour pickers with Apply (operator do.it3).

    (Removed the unused ``obj = context.object`` local.)
    """
    bl_idname = "VIEW3D_PT_test_3"
    bl_label = "Eyes"
    bl_parent_id = 'VIEW3D_PT_test_1'
    bl_options = {'DEFAULT_CLOSED'}
    def draw(self, context):
        layout = self.layout
        layout.prop(context.scene, "eyes_float")
        layout.prop(context.scene, "pupil_float")
        layout.operator("do.it3", text="Apply")
class MecaF4(MainMecaFacePanel, bpy.types.Panel):
    """Mouth colour pickers (outline, inner, tongue, teeth) with Apply
    (operator do.it4).

    (Removed the unused ``obj = context.object`` local.)
    """
    bl_idname = "VIEW3D_PT_test_4"
    bl_label = "Mouth"
    bl_parent_id = 'VIEW3D_PT_test_1'
    bl_options = {'DEFAULT_CLOSED'}
    def draw(self, context):
        layout = self.layout
        layout.prop(context.scene, "omouth_float")
        layout.prop(context.scene, "inmouth_float")
        layout.prop(context.scene, "tong_float")
        layout.prop(context.scene, "teeth_float")
        layout.operator("do.it4", text="Apply")
class MecaF5(MainMecaFacePanel, bpy.types.Panel):
    """'Extra' sub-panel: lips colour picker with Apply (operator do.it5).

    (Removed the unused ``obj = context.object`` local.)
    """
    bl_idname = "VIEW3D_PT_test_5"
    bl_label = "Extra"
    bl_parent_id = 'VIEW3D_PT_test_1'
    bl_options = {'DEFAULT_CLOSED'}
    def draw(self, context):
        layout = self.layout
        layout.prop(context.scene, "lips_float")
        layout.operator("do.it5", text="Apply")
class MecaDoIt2(bpy.types.Operator):
    """Apply the picked eyebrow colour to the Eyebrows material."""
    bl_idname = "do.it2"
    bl_label = "Button texts"
    def execute(self, context):
        # Drive both the shader input and the viewport display colour.
        bpy.data.materials['Eyebrows'].node_tree.nodes["Shader"].inputs[0].default_value = bpy.context.scene.test_float
        bpy.data.materials['Eyebrows'].diffuse_color = bpy.context.scene.test_float
        return {'FINISHED'}
class MecaDoIt3(bpy.types.Operator):
    """Apply the picked eye base and pupil colours to the Eyes material."""
    bl_idname = "do.it3"
    bl_label = "Button texts"
    def execute(self, context):
        # Shader inputs 1/2 are base and pupil; viewport colour follows base.
        bpy.data.materials['Eyes'].node_tree.nodes["Shader"].inputs[1].default_value = bpy.context.scene.eyes_float
        bpy.data.materials['Eyes'].node_tree.nodes["Shader"].inputs[2].default_value = bpy.context.scene.pupil_float
        bpy.data.materials['Eyes'].diffuse_color = bpy.context.scene.eyes_float
        return {'FINISHED'}
class MecaDoIt4(bpy.types.Operator):
    """Apply the picked mouth colours (outline, inner, tongue, teeth)."""
    bl_idname = "do.it4"
    bl_label = "Button texts"
    def execute(self, context):
        bpy.data.materials['OutlineMouth'].node_tree.nodes["Shader"].inputs[0].default_value = bpy.context.scene.omouth_float
        bpy.data.materials['InnerMouth'].node_tree.nodes["Shader"].inputs[1].default_value = bpy.context.scene.inmouth_float
        bpy.data.materials['InnerMouth'].node_tree.nodes["Shader"].inputs[2].default_value = bpy.context.scene.tong_float
        # Both teeth nodes share one picked colour.
        bpy.data.materials['InnerMouth'].node_tree.nodes["Teeth1"].inputs[2].default_value = bpy.context.scene.teeth_float
        bpy.data.materials['InnerMouth'].node_tree.nodes["Teeth2"].inputs[2].default_value = bpy.context.scene.teeth_float
        bpy.data.materials['OutlineMouth'].diffuse_color = bpy.context.scene.omouth_float
        bpy.data.materials['InnerMouth'].diffuse_color = bpy.context.scene.inmouth_float
        return {'FINISHED'}
class MecaDoIt5(bpy.types.Operator):
    """Apply the picked lips colour to the Lips material."""
    bl_idname = "do.it5"
    bl_label = "Button texts"
    def execute(self, context):
        bpy.data.materials['Lips'].node_tree.nodes["Shader"].inputs[0].default_value = bpy.context.scene.lips_float
        bpy.data.materials['Lips'].diffuse_color = bpy.context.scene.lips_float
        return {'FINISHED'}
class MecaF6(MainMecaFacePanel, bpy.types.Panel):
    """'Final' panel: Finish button that renames the rig so another can be
    added (operator do.app).

    (Removed the unused ``obj = context.object`` local.)
    """
    bl_idname = "VIEW3D_PT_test_2"
    bl_label = "Final"
    bl_parent_id = 'SCENE_PT_layout'
    bl_options = {'DEFAULT_CLOSED'}
    def draw(self, context):
        row = self.layout.row(align=True)
        row.operator("do.app", text="Finish")
class MecaF7(MainMecaFacePanel, bpy.types.Panel):
    """'Lines' panel with the add-line button (operator line.it).

    (Removed the unused ``obj = context.object`` local.)
    """
    bl_idname = "VIEW3D_PT_test_14"
    bl_label = "Lines"
    def draw(self, context):
        row = self.layout.row(align=True)
        row.operator("line.it", text="Add line")
class MecaF8(MainMecaFacePanel, bpy.types.Panel):
    """Colour picker for the active line bone with Apply (operator do.it9).

    (Removed the unused ``obj = context.object`` local.)
    """
    bl_idname = "VIEW3D_PT_test_15"
    bl_label = "Colour"
    bl_parent_id = 'VIEW3D_PT_test_14'
    bl_options = {'DEFAULT_CLOSED'}
    @classmethod
    def poll(cls, context):
        # Needs an active bone (the line's controller) to draw against.
        return (context.active_bone is not None)
    def draw(self, context):
        layout = self.layout
        layout.prop(context.active_bone, "line_colour")
        layout.operator("do.it9", text="Apply")
class MecaF10(MainMecaFacePanel, bpy.types.Panel):
    """Start/End frame controls for the active line's visibility keyframes
    (applied by operator temp.line)."""
    bl_idname = "VIEW3D_PT_test_16"
    bl_label = "Set Visibility"
    bl_parent_id = 'VIEW3D_PT_test_14'
    bl_options = {'DEFAULT_CLOSED'}
    @classmethod
    def poll(cls, context):
        # Needs an active bone (the line's controller).
        return (context.active_bone is not None)
    def draw(self, context):
        layout = self.layout
        row = layout.row(align=True)
        row.prop(context.active_bone, "Start")
        row.operator('temp.line')
        row.prop(context.active_bone, "End")
class MecaDoIt9(bpy.types.Operator):
    """Apply the picked colour to the object matching the active line bone."""
    bl_idname = "do.it9"
    bl_label = "Button texts"
    def execute(self, context):
        # The line object shares its name with the active bone (see MecaLineIt).
        mato = bpy.context.active_bone.name
        obmatos = bpy.data.objects[mato]
        obmatos.material_slots[0].material.node_tree.nodes["Shader"].inputs[0].default_value = bpy.context.active_bone.line_colour
        obmatos.material_slots[0].material.diffuse_color = bpy.context.active_bone.line_colour
        return {'FINISHED'}
class MecaDoIt(bpy.types.Operator):
    """Append the MecaFace rig collection from the bundled .blend and attach
    every face part to the selected object.

    Rewritten: the eight identical per-part hookup blocks are one loop
    (same objects, same order, same assignments).
    """
    bl_idname = "do.it"
    bl_label = "Button text"
    def execute(self, context):
        a = bpy.context.selected_objects[0]
        # Append the rig collection shipped with the add-on.
        path = addon_dirc + "/TEst 1 with rig.blend/Collection/"
        object_name = "Collection"
        bpy.ops.wm.append(filename=object_name, directory=path)
        objects = bpy.data.objects
        b = objects['DeOne']
        # Parent the rig root to the head bone when the target is rigged;
        # otherwise fall back to plain object parenting.
        if a.parent and a.parent.type == 'ARMATURE':
            arm = a.parent
            bone = 'DEF_Cranium'
            if bone in arm.pose.bones.keys():
                b.parent = arm
                b.parent_type = 'BONE'
                b.parent_bone = bone
                b.location = (0, -.96, 0)
                b.scale = (.1, .1, .1)
            else:
                b.parent = a
        else:
            b.parent = a
        # Each face part shrinkwraps onto the target, copies its data, and
        # is locked against accidental selection.
        for part_name in ('Mouth', 'Lips', 'Eyelash1', 'Eyelash2',
                          'EyeR', 'EyeL', 'BrowL', 'BrowR'):
            part = objects[part_name]
            part.modifiers["Shrinkwrap"].target = a
            part.modifiers["DataTransfer"].object = a
            part.hide_select = True
        # Hide the helper controllers collection.
        bpy.data.collections['UV controllers'].hide_viewport = True
        return {'FINISHED'}
class MecaDoApp(bpy.types.Operator):
    """Finalize the current rig: rename its objects, materials and the
    controller collection with a 'Fin' suffix so a fresh rig can be appended
    without name clashes.

    Rewritten: the per-item rename statements are two loops (same names,
    same order).
    """
    bl_idname = "do.app"
    bl_label = "Button texts"
    def execute(self, context):
        objectss = bpy.data.objects
        for old_name, new_name in (
                ('DeOne', 'DeFin'),
                ('Mouth', 'MouthFin'),
                ('Lips', 'LipsFin'),
                ('Eyelash1', 'Eyelash1Fin'),
                ('Eyelash2', 'Eyelash2Fin'),
                ('EyeR', 'EyeRFin'),
                ('EyeL', 'EyeLFin'),
                ('BrowL', 'BrowLFin'),
                ('BrowR', 'BrowRFin')):
            objectss[old_name].name = new_name
        for material_name in ('Eyebrows', 'Eyes', 'OutlineMouth',
                              'InnerMouth', 'Lips'):
            bpy.data.materials[material_name].name = material_name + "Fin"
        bpy.data.collections['UV controllers'].name = "UV controllersFin"
        return {'FINISHED'}
class MecaLineIt(bpy.types.Operator):
    """Append a new animated face line from lines.blend and hook it to the
    selected object."""
    bl_idname = "line.it"
    bl_label = "Button text"
    def execute(self, context):
        # Each added line gets a unique numeric suffix.
        bpy.context.scene.line_amount = bpy.context.scene.line_amount + 1
        numors = str(bpy.context.scene.line_amount)
        a = bpy.context.selected_objects[0]
        path = addon_dirc + "/lines.blend/Collection/"
        object_name = "Collection"
        bpy.ops.wm.append(filename=object_name, directory=path)
        objects = bpy.data.objects
        arms = bpy.data.armatures
        # Hook the line mesh to the target and lock it against selection.
        c = objects['LineMain']
        c.modifiers["Shrinkwrap"].target = a
        c.modifiers["DataTransfer"].object = a
        c.hide_select = True
        c.name = "FinLineMain" + numors
        objects = bpy.data.objects
        # Rename the control bone after the armature's first child object so
        # MecaDoIt9/LineTemp can find the object from the active bone's name.
        f = objects['Lineys'].children[0]
        d = arms['Lineys'].bones['LineMain']
        d.name = f.name
        b = objects['Lineys']
        e = arms['Lineys']
        b.parent = a
        b.name = "FinLineys" + numors
        e.name = "FinLineys" + numors
        # Hide the helper controllers collection.
        collections = bpy.data.collections
        h = collections['VUS']
        h.hide_viewport = True
        return {'FINISHED'}
class LineTemp(bpy.types.Operator):
    """Keyframe the active line's visibility window around the current frame:
    hidden before (current - Start), visible through (current + End), hidden
    after."""
    bl_idname = "temp.line"
    bl_label = "Set"
    def execute(self, context):
        # The line object shares its name with the active bone.
        mato = bpy.context.active_bone.name
        afaf = bpy.data.objects[mato]
        # Window frames: start, one-past-end, one-before-start.
        bruv = bpy.context.scene.frame_current-afaf.Start
        bruv2 = bpy.context.scene.frame_current+afaf.End+1
        bruv3 = bpy.context.scene.frame_current-afaf.Start-1
        # Hidden just before the window...
        afaf.hide_render = True
        afaf.hide_viewport = True
        afaf.keyframe_insert(data_path="hide_render", frame = bruv3)
        afaf.keyframe_insert(data_path="hide_viewport", frame = bruv3)
        # ...visible at the window start...
        afaf.hide_render = False
        afaf.hide_viewport = False
        afaf.keyframe_insert(data_path="hide_render", frame = bruv)
        afaf.keyframe_insert(data_path="hide_viewport", frame = bruv)
        # ...hidden again right after the window ends.
        afaf.hide_render = True
        afaf.hide_viewport = True
        afaf.keyframe_insert(data_path="hide_render", frame = bruv2)
        afaf.keyframe_insert(data_path="hide_viewport", frame = bruv2)
        return {'FINISHED'}
def register():
    """Register every MecaFace panel and operator with Blender.

    Rewritten from nineteen repeated calls to one loop; the tuple keeps the
    original registration order (parents before their sub-panels).
    """
    for cls in (
            MecaDoIt, MecaDoApp, MecaF0, MecaF1, MecaDoIt2, MecaF2,
            MecaF3, MecaDoIt3, MecaF4, MecaDoIt4, MecaF5, MecaF6,
            MecaDoIt5, MecaLineIt, MecaF7, MecaF8, MecaDoIt9,
            LineTemp, MecaF10):
        bpy.utils.register_class(cls)
def unregister():
    """Unregister every MecaFace panel and operator.

    Rewritten from nineteen repeated calls to one loop; the tuple keeps the
    original (registration-order) sequence.
    """
    for cls in (
            MecaDoIt, MecaDoApp, MecaF0, MecaF1, MecaDoIt2, MecaF2,
            MecaF3, MecaDoIt3, MecaF4, MecaDoIt4, MecaF5, MecaF6,
            MecaDoIt5, MecaLineIt, MecaF7, MecaF8, MecaDoIt9,
            LineTemp, MecaF10):
        bpy.utils.unregister_class(cls)
# Allow running the add-on file directly from Blender's text editor.
if __name__ == "__main__":
    register()
|
from common import *
# Designer-editable properties for this role (exposed in the scene editor).
properties = ArrayList()
properties.add( DoubleProperty( "speed" ) )
class Player(AbstractRole) :
    '''Mouse-chasing player role (Itchy engine script): steers toward the
    pointer, slides along walls, scrolls the view near screen edges,
    collects collectables, and reacts to lights (caught) and exits.
    '''
    def __init__(self) :
        # `speed` is editable in the scene designer (see `properties` above);
        # `collected` counts picked-up collectables.
        self.speed = 1
        self.collected = 0
    def onBirth(self) :
        self.addTag("player")
        self.inputClick = Input.find("click")
    def onSceneCreated(self) :
        self.view = game.layout.findView("middle")
    def tick(self):
        # Remember the pose so wall collisions can be undone below.
        oldX = self.actor.x
        oldY = self.actor.y
        oldDirection = self.actor.direction
        mouseX = self.view.getWorldX( Itchy.getMouseX() )
        mouseY = self.view.getWorldY( Itchy.getMouseY() )
        distance = self.actor.position.distance( mouseX, mouseY )
        # Chase the pointer; slow down proportionally within 100px,
        # stop entirely within 10px.
        if distance > 10 :
            self.actor.direction = self.actor.position.directionDegrees( mouseX, mouseY )
            self.actor.moveForwards( self.speed if distance > 100 else self.speed * distance / 100 )
        # NOTE(review): this click-handling block is repeated again further
        # down -- a single click may fire click() twice; confirm intended.
        if self.inputClick.pressed() :
            if self.collided( "clickable" ) :
                self.collisions( "clickable" ).get(0).click()
        if self.collided("solid") :
            self.actor.direction = oldDirection
            # Hit a wall, so let's try moving x and y independently
            tx = self.actor.x
            self.actor.moveTo( oldX, self.actor.y )
            if self.collided("solid") :
                self.actor.moveTo( tx, oldY )
                if self.collided("solid") :
                    # Give up, move back.
                    self.actor.moveTo(oldX, oldY)
        # Scroll the view when the player nears a screen edge.
        screenX = self.actor.x - self.view.visibleRectangle.x
        screenY = self.actor.y - self.view.visibleRectangle.y
        if screenX < 200 :
            game.sceneDirector.scrollBy( -1 + self.actor.x - oldX, 0 )
        if screenX > 600 :
            game.sceneDirector.scrollBy( 1 + self.actor.x -oldX, 0 )
        if screenY < 150 :
            game.sceneDirector.scrollBy( 0, -4 )
        if screenY > 450 :
            game.sceneDirector.scrollBy( 0, 4 )
        if self.inputClick.pressed() :
            if self.collided( "clickable" ) :
                self.collisions( "clickable" ).get(0).click()
        # Touching a light ends the level immediately.
        if self.collided("light") :
            game.sceneDirector.caught()
            return
        # Exits only work once all collectables are gathered.
        if self.collided("exit") :
            exit = self.collisions("exit").get(0)
            if self.collected >= game.sceneDirector.collectables :
                game.startScene( exit.nextLevel )
        for collectable in self.collisions("collectable") :
            self.collected += collectable.collect()
    # Boiler plate code - no need to change this
    def getProperties(self):
        return properties
    # Boiler plate code - no need to change this
    def getClassName(self):
        # NOTE(review): passes `Role`, not `Player`; presumably the engine
        # expects the base script type here -- confirm against other roles.
        return ClassName( Role, self.__module__ + ".py" )
|
__prog_name__ = 'update_database_from_ftp.py'
__prog_desc__ = ('Update the GTDB with the latest genome downloaded from FTP.' +
'Before this update, make sure all metadata have been generated and CheckM did run on all new genomes')
__author__ = 'Pierre Chaumeil'
__copyright__ = 'Copyright 2016'
__credits__ = ['Pierre Chaumeil']
__license__ = 'GPL3'
__version__ = '0.0.1'
__maintainer__ = 'Pierre Chaumeil'
__email__ = 'p.chaumeil@qfab.org'
__status__ = 'Development'
import os
import shutil
import hashlib
import re
import glob
import gzip
import sys
import argparse
import datetime
import ntpath
from biolib.common import remove_extension
from dateutil.parser import parse
from database_configuration import GenomeDatabaseConnectionFTPUpdate
class UpdateGTDBDatabase(object):
    """Synchronise the GTDB PostgreSQL `genomes` table with genomes downloaded
    from the NCBI FTP site.

    Records are updated in place, versioned, added or removed so the database
    mirrors the on-disk genome directories.  A per-run report log is written.

    NOTE(review): most SQL here is built with str.format() on values read from
    files / the database itself; parameterized queries would be safer -- confirm
    the inputs are trusted.
    """

    def __init__(self, db, date):
        """
        Args:
            db: source database, either 'refseq' or 'genbank'.
            date: download date string, used only to name the report log.
        """
        self.db = db
        # By default we set the id to genbank (it is either 2 or 3
        self.id_database = 3
        if db == "refseq":
            self.id_database = 2
        self.domains = ["archaea", "bacteria"]
        # Human-readable audit log of every add/version/update/remove action.
        self.report_database_update = open(
            "report_{0}_{1}_update_db.log".format(db, date), "w")
        self.temp_con = GenomeDatabaseConnectionFTPUpdate.GenomeDatabaseConnectionFTPUpdate()
        self.temp_con.MakePostgresConnection()
        self.temp_cur = self.temp_con.cursor()

    def runUpdate(self, checkm, genome_dirs_file, dl_date):
        """Top-level driver: update existing genomes, add/version new ones,
        then fix paths or delete records that disappeared from disk.

        Args:
            checkm: path to the CheckM profile TSV of the new genomes.
            genome_dirs_file: TSV mapping record id -> on-disk directory.
            dl_date: FTP download date, 'YYYY-MM-DD'.
        """
        update_date = self.parse_date(dl_date)
        dict_existing_records = self._populateExistingRecords()
        list_checkm_records = self._populateNewRecords(checkm)
        genome_dirs_dict = self._populateGenomeDirs(genome_dirs_file)
        # Check if the genome is an existing genome
        # short_checkm_records list the records that are either to add or
        # version
        short_checkm_records = self._updateExistingGenomes(
            dict_existing_records, list_checkm_records, genome_dirs_dict)
        self.temp_con.commit()
        self._addOrVersionNewGenomes(
            dict_existing_records, short_checkm_records, genome_dirs_dict, update_date)
        self.temp_con.commit()
        # Because we have added and updated script we repopulate
        # dict_existing_records
        dict_existing_records = self._populateExistingRecords()
        self._checkPathorRemoveRecord(
            dict_existing_records, genome_dirs_dict, short_checkm_records)
        self.temp_con.commit()
        self.report_database_update.close()

    def _checkPathorRemoveRecord(self, dict_existing_records, genome_dirs_dict, list_checkm_records):
        """For every DB record not touched by CheckM: delete it if its
        directory is gone, otherwise make sure its stored paths are current."""
        count = 1
        for record in dict_existing_records:
            print "_checkPathorRemoveRecord: {0}/{1}".format(count, len(dict_existing_records))
            count += 1
            # if the record was part of the checkm file, it has already been
            # updated
            if record in list_checkm_records:
                continue
            else:
                if record not in genome_dirs_dict:
                    self._removeRecord(record)
                else:
                    self._checkPathRecord(
                        record, dict_existing_records[record], genome_dirs_dict[record])

    def _removeRecord(self, record):
        """Delete *record* from `genomes`, logging any genome lists that
        referenced it so curators can review the side effects."""
        self.report_database_update.write(
            "{0}\t{1}\tremoved\n".format(self.db, record))
        # Find user genome lists that contain this genome before deleting it.
        query = ("SELECT gl.id,gl.name,us.username " +
                 "FROM genome_lists as gl " +
                 "LEFT JOIN genome_list_contents as glc on glc.list_id=gl.id " +
                 "LEFT JOIN users as us on us.id=gl.owner_id " +
                 "LEFT JOIN genomes as ge on ge.id=glc.genome_id " +
                 "WHERE ge.name like '{0}'".format(record))
        self.temp_cur.execute(query)
        raw_results = self.temp_cur.fetchall()
        self.report_database_update.write("{0}\n".format(record))
        if len(raw_results) > 0:
            self.report_database_update.write(
                "modified list(s) [{0}]:\n".format(record))
            for result in raw_results:
                self.report_database_update.write(
                    "list_id:{0}\tlist_name:{1}\t,list_owner:{2}\n".format(*result))
            self.report_database_update.write("###########\n")
        else:
            self.report_database_update.write("No list has been modified\n")
            self.report_database_update.write("###########\n")
        query_delete = (
            "DELETE FROM genomes WHERE name LIKE '{0}'".format(record))
        self.temp_cur.execute(query_delete)

    def _checkPathRecord(self, record, path_in_db, path_in_folder):
        """If the genome moved on disk, rewrite its fasta and genes paths in
        the database to point at the new location."""
        if path_in_db not in path_in_folder:
            # Strip everything before the genbank/ or refseq/ component so the
            # stored path is relative to the repository root.
            path_in_folder = re.sub(
                r"(^.+\/)(genbank\/|refseq\/)", r"\g<2>", path_in_folder)
            path_in_folder += "/" + os.path.basename(path_in_folder)
            path_in_db = re.sub(r"(.+)(_genomic.fna)", r"\g<1>", path_in_db)
            query = "update genomes set fasta_file_location = replace(fasta_file_location, '{0}', '{1}') where id_at_source like '{2}'".format(
                path_in_db, path_in_folder, record)
            self.report_database_update.write("{0}\t{1}\tupdate path\t{2}\t{3}\n".format(
                self.db, record, path_in_db, path_in_folder))
            self.temp_cur.execute(query)
            # Prodigal protein file lives next to the genome directory.
            pathinfo = path_in_folder.rsplit('/', 1)
            genes_path = os.path.join(
                pathinfo[0], 'prodigal', record + "_protein.faa").replace("\\", "/")
            query = "update genomes set genes_file_location = '{0}' where id_at_source like '{1}'".format(
                genes_path, record)
            self.temp_cur.execute(query)

    def _addOrVersionNewGenomes(self, dict_existing_records, list_checkm_records, genome_dirs_dict, update_date):
        """Insert genomes that are new, or overwrite ("version") the row of a
        previous version of the same accession (same id without the .N suffix)."""
        count = 1
        for checkm_record in list_checkm_records:
            print "_addOrVersionNewGenomes: {0}/{1}".format(count, len(list_checkm_records))
            count += 1
            if (checkm_record not in dict_existing_records) and (checkm_record in genome_dirs_dict):
                check_record_base = checkm_record.rsplit(".", 1)[0]
                id_record = self._checkPreviousVersion(check_record_base)
                if id_record < 0:  # -1
                    # we add the genome to the database
                    self._addNewGenomes(
                        checkm_record, genome_dirs_dict, update_date)
                else:
                    self._addNewGenomes(
                        checkm_record, genome_dirs_dict, update_date, id_record)

    def normaliseID(self, accession):
        """Return the 'formatted' id: 'G' + digits of the accession, version
        suffix dropped (e.g. 'GCA_000123.1' -> 'G000123')."""
        normaccession = "G" + accession[4:accession.find('.', 0)]
        return normaccession

    def _addNewGenomes(self, checkm_record, genome_dirs_dict, update_date, id_record=None):
        """Build the column values for *checkm_record* and either INSERT a new
        row (id_record is None) or UPDATE the row of the previous version.

        The order of list_genome_details must match the column list of the
        INSERT statement below.
        """
        list_genome_details = [checkm_record]
        list_genome_details.append('')  # description
        list_genome_details.append(True)  # owned_by_root
        list_genome_details.append(None)  # owner_id
        fasta_file_path = os.path.join(genome_dirs_dict[checkm_record],
                                       os.path.basename(genome_dirs_dict[checkm_record]) + "_genomic.fna")
        # Store the path relative to the genbank/ or refseq/ root.
        fasta_file_path_shorten = re.sub(
            r"(.+/)(genbank\/|refseq\/)", r"\g<2>", fasta_file_path)
        list_genome_details.append(fasta_file_path_shorten)
        list_genome_details.append(self.sha256Calculator(fasta_file_path))
        list_genome_details.append(self.id_database)
        list_genome_details.append(checkm_record)
        list_genome_details.append(update_date)
        list_genome_details.append(True)  # has_changed
        list_genome_details.append(update_date)
        _genome_path, genome_id = ntpath.split(genome_dirs_dict[checkm_record])
        # Keep only the accession prefix (up to the '_' after 'GCA_'/'GCF_').
        genome_id = genome_id[0:genome_id.find('_', 4)]
        gene_file_path = os.path.join(
            genome_dirs_dict[checkm_record], "prodigal", genome_id + "_protein.faa")
        gene_file_path_shorten = re.sub(
            r"(.+/)(genbank\/|refseq\/)", r"\g<2>", gene_file_path)
        list_genome_details.append(gene_file_path_shorten)
        list_genome_details.append(self.sha256Calculator(gene_file_path))
        list_genome_details.append(self.normaliseID(checkm_record))
        if id_record is None:
            self.report_database_update.write(
                "{0}\t{1}\tadd\n".format(self.db, checkm_record))
            self.temp_cur.execute("INSERT INTO genomes " +
                                  "(name,description,owned_by_root,owner_id,fasta_file_location,fasta_file_sha256,genome_source_id,id_at_source,date_added,has_changed,last_update,genes_file_location,genes_file_sha256,formatted_source_id) " +
                                  "VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)", list_genome_details)
        else:
            self.report_database_update.write(
                "{0}\t{1}\tversion\n".format(self.db, checkm_record))
            self.temp_cur.execute("UPDATE genomes " +
                                  "SET name = %s,description = %s, " +
                                  "owned_by_root = %s,owner_id = %s, " +
                                  "fasta_file_location = %s,fasta_file_sha256 = %s, " +
                                  "genome_source_id = %s,id_at_source = %s, " +
                                  "date_added = %s,has_changed = %s, " +
                                  "last_update = %s,genes_file_location = %s, " +
                                  "genes_file_sha256 = %s, " +
                                  "formatted_source_id = %s WHERE id = {0}; ".format(id_record), list_genome_details)
            # Old alignments belong to the previous version; drop them.
            self.temp_cur.execute(
                "DELETE FROM aligned_markers where genome_id = {0};".format(id_record))

    def _updateExistingGenomes(self, dict_existing_records, list_checkm_records, genome_dirs_dict):
        """Refresh path and sha256 columns of CheckM records that already exist
        in the database.

        Returns:
            the CheckM records NOT already in the database (to add or version).
        """
        new_list_checkm_records = []
        count = 1
        for checkm_record in list_checkm_records:
            print "_updateExistingGenomes: {0}/{1}".format(count, len(list_checkm_records))
            count += 1
            if (checkm_record in dict_existing_records) and (checkm_record in genome_dirs_dict):
                self.report_database_update.write(
                    "{0}\t{1}\tupdate protein file\n".format(self.db, checkm_record))
                path_gtdb = re.sub(
                    r"(^.+\/)(genbank\/|refseq\/)", r"\g<2>", genome_dirs_dict[checkm_record])
                path_gtdb += "/" + os.path.basename(path_gtdb)
                path_database = re.sub(
                    r"(.+)(_genomic.fna)", r"\g<1>", dict_existing_records[checkm_record])
                path_protein_database = re.sub(
                    r"(.+)/(GC._[^_]+)(.*)", r"\g<1>/prodigal/\g<2>_protein.faa", path_database)
                # If the record is in a different folder , we need to change
                # it's path in the database
                if path_database not in path_gtdb:
                    query = "update genomes set fasta_file_location = replace(fasta_file_location, '{0}', '{1}') where name like '{2}'".format(
                        path_database, path_gtdb, checkm_record)
                    self.temp_cur.execute(query)
                    query = "update genomes set genes_file_location = '{0}' where name like '{1}'".format(
                        path_protein_database, checkm_record)
                    self.temp_cur.execute(query)
                # if the records is in the Checkm folder that means genomics
                # and protein files have changed. We need to re write their
                # sha256 values
                genomic_files = glob.glob(
                    genome_dirs_dict[checkm_record] + "/*_genomic.fna")
                if len(genomic_files) == 1:
                    genomic_file = genomic_files[0]
                    new_md5_genomic = self.sha256Calculator(genomic_file)
                    query = "update genomes set fasta_file_sha256 = '{0}' where name like '{1}'".format(
                        new_md5_genomic, checkm_record)
                    self.temp_cur.execute(query)
                _genome_path, genome_id = ntpath.split(
                    genome_dirs_dict[checkm_record])
                genome_id = genome_id[0:genome_id.find('_', 4)]
                gene_file_path = os.path.join(
                    genome_dirs_dict[checkm_record], "prodigal")
                gene_files = glob.glob(gene_file_path + "/*_protein.faa")
                if len(gene_files) == 1:
                    gene_file = gene_files[0]
                    new_md5_gene = self.sha256Calculator(gene_file)
                    query = "update genomes set genes_file_sha256 = '{0}' where name like '{1}'".format(
                        new_md5_gene, checkm_record)
                    self.temp_cur.execute(query)
            else:
                new_list_checkm_records.append(checkm_record)
        return new_list_checkm_records

    def _populateGenomeDirs(self, genome_dirs_file):
        """Parse the two-column TSV file into {record_id: directory_path}."""
        with open(genome_dirs_file, 'r') as gen_file:
            gen_dict = {gen_line.split("\t")[0]: gen_line.split("\t")[1].strip()
                        for gen_line in gen_file}
        return gen_dict

    def _populateNewRecords(self, checkm):
        """Return the record ids (first two '_'-separated tokens of column 1)
        from the CheckM TSV, skipping the header line.

        NOTE(review): the file handle is never closed -- consider a `with`
        block.
        """
        list_result = []
        checkm_fh = open(checkm, "rb")
        checkm_fh.readline()
        for line in checkm_fh:
            full_name = line.split("\t")[0]
            name = full_name.split("_")[0] + "_" + full_name.split("_")[1]
            list_result.append(name)
        return list_result

    def _populateExistingRecords(self):
        """Return {name: fasta_file_location} for all genomes of this source
        database (refseq=2 / genbank=3)."""
        self.temp_cur.execute("SELECT gen.name,gen.fasta_file_location " +
                              "FROM genomes as gen " +
                              "WHERE gen.genome_source_id = {0} ;".format(self.id_database))
        dict_records = {key: value for (key, value) in self.temp_cur}
        return dict_records

    def _checkPreviousVersion(self, checkm_record):
        """Return the DB id of an earlier version of *checkm_record* (matched
        as 'accession.%'), or -1 when none exists."""
        self.temp_cur.execute("SELECT gen.id " +
                              "FROM genomes as gen " +
                              "WHERE gen.id_at_source like '{0}.%' ;".format(checkm_record))
        list_result = [record for (record,) in self.temp_cur]
        if len(list_result) > 0:
            return list_result[0]
        else:
            return -1

    def sha256Calculator(self, file_path):
        """Return the hex SHA-256 digest of *file_path*, read line by line.

        Raises:
            Exception: if the file cannot be opened.
        """
        try:
            filereader = open(file_path, "rb")
        except:
            raise Exception("Cannot open Fasta file: " + file_path)
        m = hashlib.sha256()
        for line in filereader:
            m.update(line)
        sha256_checksum = m.hexdigest()
        filereader.close()
        return sha256_checksum

    def parse_date(self, date_text):
        """Validate *date_text* is 'YYYY-MM-DD' and return it parsed by
        dateutil.

        Raises:
            ValueError: when the string does not match the expected format.
        """
        try:
            datetime.datetime.strptime(date_text, '%Y-%m-%d')
        except ValueError:
            raise ValueError("Incorrect data format, should be YYYY-MM-DD")
        return parse(date_text)
# Command-line entry point (Python 2: note the print statements).
if __name__ == "__main__":
    print __prog_name__ + ' v' + __version__ + ': ' + __prog_desc__
    print ' by ' + __author__ + ' (' + __email__ + ')' + '\n'
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('--checkm_profile_new_genomes', dest="checkm",
                        required=True, help='path_to_checkm file')
    parser.add_argument('--genome_dirs_file', dest="genome_dirs_file",
                        required=True, help='genome_dirs file listing all REcords')
    parser.add_argument('--database', dest="db", required=True, choices=["refseq", "genbank"],
                        help='RefSeq or Genbank')
    parser.add_argument('--ftp_download_date', dest="date", required=True,
                        help='Date when the FTP download has been run (format YYYY-MM-DD)')
    args = parser.parse_args()
    try:
        update_mngr = UpdateGTDBDatabase(args.db, args.date)
        update_mngr.runUpdate(args.checkm, args.genome_dirs_file, args.date)
    except SystemExit:
        # argparse and explicit exits land here; message only, no traceback.
        print "\nControlled exit resulting from an unrecoverable error or warning."
    except:
        # Re-raised so the full traceback is still shown after the notice.
        print "\nUnexpected error:", sys.exc_info()[0]
        raise
|
from pyntc import ntc_device as NTC
import json

# Open an SSH session to a Cisco IOS switch via pyntc.
sw1 = NTC(host='192.168.122.10', username='test', password='test', device_type="cisco_ios_ssh")
sw1.open()
# Pull device facts (read-only) from the live device.
ios_output = sw1.facts
# Push a minimal OSPF config: process 1, all interfaces in area 0.
sw1.config_list(['router ospf 1',
                 'network 0.0.0.0 255.255.255.255 area 0'])
|
import os,sys,struct
import numpy as np

# Reader for a binary wavefunction dump: a fixed-size header followed by a
# row-major array of complex doubles.  Field types use native "l" longs, so
# the file must come from a platform with the same ABI -- TODO confirm.
HEADER_FMT = "llllllliidddddddddddddd"

noargs = len(sys.argv)
if ( noargs != 2 ):
    print( "No filename specified." )
    exit()
fh = open(sys.argv[1], 'rb' )
header_raw = fh.read(struct.calcsize(HEADER_FMT))
header = struct.unpack( HEADER_FMT, header_raw )
# Header layout (by index): 0=struct size, 3=nDims, 4..6=grid dims,
# 8=complex flag, 9=time, 10..15=axis ranges, 16..22=grid/k-space steps, dt.
nDims = header[3]
nDimX = header[4]
nDimY = header[5]
nDimZ = header[6]
bCmpx = header[8]
t = header[9]
xMin = header[10]
xMax = header[11]
yMin = header[12]
yMax = header[13]
zMin = header[14]
zMax = header[15]
dx = header[16]
dy = header[17]
dz = header[18]
dkx = header[19]
dky = header[20]
dkz = header[21]
dt = header[22]
# Magic check: a valid generic_header is exactly 1380 bytes.
if ( header[0] != 1380 ):
    print( "Invalid file format." )
    exit()
if ( bCmpx != 1 ):
    print( "File does not contain complex data." )
    exit()
print("sizeof(generic_header) = %ld\n" % (header[0]))
print("dims = (%ld,%ld,%ld)\n" % (nDimX,nDimY,nDimZ))
print("xrange = (%g,%g)\n" % (xMin,xMax))
print("yrange = (%g,%g)\n" % (yMin,yMax))
print("zrange = (%g,%g)\n" % (zMin,zMax))
print("t = %g\n" % (t))
# Payload starts right after the header.
fh.seek(header[0],0)
if nDims == 1:
    shape = (nDimX,)
elif nDims == 2:
    shape = (nDimX, nDimY)
elif nDims == 3:
    shape = (nDimX, nDimY, nDimZ)
else:
    shape = None  # unknown dimensionality: leave `data` undefined, as before
if shape is not None:
    count = 1
    for dim in shape:
        count *= dim
    # Each element is two doubles (re, im), i.e. a native complex128, stored
    # with the last index varying fastest -- exactly NumPy C order, so one
    # bulk read replaces the original per-element struct.unpack loop.
    # (np.complex128 also replaces np.complex_, removed in NumPy 2.0.)
    data = np.fromfile(fh, dtype=np.complex128, count=count).reshape(shape)
fh.close()
|
# Standard library
import codecs
import ConfigParser
import json
import os
import re
import sys
import time
from os import listdir, remove
from os.path import dirname, exists, expanduser, isfile, join, realpath

# Third-party
import requests
activate_this = join(expanduser('~'), 'ckan_default', 'bin', 'activate_this.py')
execfile(activate_this, dict(__file__=activate_this))
sys.path.append(join(expanduser('~'), 'shared', 'bin'))
import ckanapi
import validate_data_json
def config_section_map(section):
    """Return one section of the module-global `config` as {option: value}.

    Options whose lookup fails are stored as None so callers can detect
    missing values instead of crashing.
    """
    dict1 = {}
    options = config.options(section)
    for option in options:
        try:
            dict1[option] = config.get(section, option)
            if dict1[option] == -1:
                # config.get returns strings; -1 can only appear if a custom
                # getter is installed -- kept from the original for safety.
                print('skip: %s' % option)
        except ConfigParser.Error:
            # Narrowed from a bare `except:`: only configuration lookup
            # errors (interpolation, missing option, ...) are expected here;
            # anything else should propagate instead of being swallowed.
            print('exception on %s!' % option)
            dict1[option] = None
    return dict1
def create_missing_dir(directory):
    """Create *directory* (mode 0755) if it does not already exist.

    BUG FIX: the original called os.makedirs but the file never imported the
    `os` module (only names from os/os.path), so this raised NameError at
    runtime; `import os` is now added to the file's imports.  The mode literal
    is written 0o755 (valid in Python 2.6+ and 3).
    """
    if not exists(directory):
        os.makedirs(directory, 0o755)
def delete_resources(lang, dataset_id):
    """Delete every resource of *dataset_id* on the *lang* CKAN site.

    Uses the module-global `ckan` dict of per-language RemoteCKAN clients.
    """
    print('[%s] deleting resources for dataset id=%s...' % (lang, dataset_id))
    dataset = ckan[lang].action.package_show(id=dataset_id)
    for resource in dataset['resources']:
        resource_id = resource['id']
        ckan[lang].action.resource_delete(id=resource_id)
        print('- resource id=%s deleted' % resource_id)
def add_resource(lang, dataset_id, resource_url, resource_format, resource_title, resource_description,
                 is_api, schema):
    """Create one resource under *dataset_id* on the *lang* CKAN site.

    `is_api` and `schema` are custom fields passed through to CKAN; schema may
    be None when the source data.json entry has no schema.
    """
    print('[%s] adding resource for dataset id=%s...' % (lang, dataset_id))
    ckan[lang].action.resource_create(package_id=dataset_id,
                                      url=resource_url,
                                      format=resource_format.upper(),
                                      name=resource_title,
                                      description=resource_description,
                                      is_api=is_api,
                                      schema=schema)
    print('- resource url=%s added' % resource_url)
def add_resources(dataset_id, resources):
    """Register every resource of a dataset on each language-specific CKAN site."""
    for res in resources:
        # 'schema' is optional in the source data.json entry.
        schema = res['schema'] if 'schema' in res else None
        for lang in lang_list:
            code = map_lang_code(lang)
            add_resource(lang, dataset_id, res['accessURL'], res['format'],
                         res['resourceTitle'][code],
                         res['resourceDescription'][code],
                         res['isAPI'], schema)
def refresh_resources(dataset_id, resources):
    """Replace all of a dataset's resources: delete per language, then re-add."""
    for lang in lang_list:
        delete_resources(lang, dataset_id)
    add_resources(dataset_id, resources)
def delete_dataset(dataset_id):
    """Delete the dataset from every language-specific CKAN site."""
    for lang in lang_list:
        ckan[lang].action.package_delete(id=dataset_id)
def modify_dataset(provider_id, dataset_id, dataset):
    """Update an existing CKAN dataset (per language) from a data.json entry,
    then replace all of its resources.

    NOTE(review): the extras-building code is duplicated in add_dataset below;
    a shared helper would keep the two in sync.
    """
    print('change dataset')
    for lang in lang_list:
        mapped_lang = map_lang_code(lang)
        # Optional data.json fields are mapped onto CKAN 'extras'.
        extras = [{'key': 'update_frequency', 'value': dataset['updateFrequency'][mapped_lang]}]
        if 'dataDictionary' in dataset:
            extras.append({'key': 'data_dictionary', 'value': dataset['dataDictionary'][mapped_lang]})
        if 'phone' in dataset:
            extras.append({'key': 'maintainer_phone', 'value': dataset['phone']})
        if 'references' in dataset:
            references = []
            for reference in dataset['references']:
                references.append(reference[mapped_lang])
            extras.append({'key': 'references', 'value': '\n'.join(references)})
        if 'sources' in dataset:
            sources = []
            for source in dataset['sources']:
                sources.append(source[mapped_lang])
            extras.append({'key': 'sources', 'value': '\n'.join(sources)})
        ckan[lang].action.package_update(id=dataset_id,
                                         title=dataset['title'][mapped_lang],
                                         maintainer=dataset['contactPoint'][mapped_lang] if 'contactPoint' in dataset else None,
                                         maintainer_email=dataset['mbox'],
                                         notes=dataset['description'][mapped_lang],
                                         groups=[{'name': dataset['group']}],
                                         owner_org=provider_id,
                                         extras=extras)
    refresh_resources(dataset_id, dataset['resources'])
def map_lang_code(lang):
    """Translate a site language code ('tc'/'sc') to its data.json language
    key ('zh-Hant'/'zh-Hans'); any other code is returned unchanged."""
    translations = {'tc': 'zh-Hant', 'sc': 'zh-Hans'}
    return translations.get(lang, lang)
def add_dataset(provider_id, dataset_id, dataset):
    """Create a new CKAN dataset (per language) from a data.json entry and
    attach all of its resources.

    NOTE(review): mirrors modify_dataset above almost line for line; a shared
    extras-building helper would keep the two in sync.
    """
    print('add dataset')
    for lang in lang_list:
        mapped_lang = map_lang_code(lang)
        # Optional data.json fields are mapped onto CKAN 'extras'.
        extras = [{'key': 'update_frequency', 'value': dataset['updateFrequency'][mapped_lang]}]
        if 'dataDictionary' in dataset:
            extras.append({'key': 'data_dictionary', 'value': dataset['dataDictionary'][mapped_lang]})
        if 'phone' in dataset:
            extras.append({'key': 'maintainer_phone', 'value': dataset['phone']})
        if 'references' in dataset:
            references = []
            for reference in dataset['references']:
                references.append(reference[mapped_lang])
            extras.append({'key': 'references', 'value': '\n'.join(references)})
        if 'sources' in dataset:
            sources = []
            for source in dataset['sources']:
                sources.append(source[mapped_lang])
            extras.append({'key': 'sources', 'value': '\n'.join(sources)})
        ckan[lang].action.package_create(name=dataset_id,
                                         title=dataset['title'][mapped_lang],
                                         maintainer=dataset['contactPoint'][mapped_lang] if 'contactPoint' in dataset else None,
                                         maintainer_email=dataset['mbox'],
                                         notes=dataset['description'][mapped_lang],
                                         groups=[{'name': dataset['group']}],
                                         owner_org=provider_id,
                                         extras=extras)
    add_resources(dataset_id, dataset['resources'])
def check_last_json(current_json, last_json_file):
    """Compare *current_json* with the cached copy at *last_json_file*.

    Returns:
        0 if the cache exists and is identical (cache untouched),
        1 if the cache exists but differs (cache rewritten),
        2 if there was no cache yet (cache created).
    """
    if not isfile(last_json_file):
        rc = 2  # new, not seen before
    else:
        with codecs.open(last_json_file, 'r', 'utf-8') as cache:
            try:
                previous = json.load(cache)
            except ValueError:
                # Corrupt/empty cache: treat as "nothing cached".
                previous = {}
        if current_json == previous:
            return 0  # unchanged -- leave the cache file alone
        rc = 1  # changed
    with codecs.open(last_json_file, 'w', 'utf-8') as cache:
        json.dump(current_json, cache)
    return rc
def process_dataset(provider_id, dataset, provider_last_dataset_dir):
    """Add, modify or skip one dataset depending on its cached state.

    Returns:
        a human-readable summary string, or None when nothing changed.
    """
    internal_id = dataset['identifier']
    # TODO: validate internal_id
    #dataset_id = generate_dataset_id(dataset['title']['en'])
    # CKAN dataset id is namespaced by provider to avoid collisions.
    dataset_id = '%s-%s' % (provider_id, internal_id)
    # TODO: check if dataset_id used and not associated with this provider/internal_id
    print('processing dataset (provider=%s, internal-id=%s, dataset=%s)...'
          % (provider_id, internal_id, dataset_id))
    dataset_last_json_file = join(provider_last_dataset_dir, internal_id)
    rc = check_last_json(dataset, dataset_last_json_file)
    if rc == 0:  # no change
        print('no change')
        return None
    if rc == 1:  # changed
        print('changed')
        modify_dataset(provider_id, dataset_id, dataset)
        return 'dataset "%s" modified' % dataset_id
    else:  # rc = 2, new dataset
        print('new dataset')
        add_dataset(provider_id, dataset_id, dataset)
        return 'dataset "%s" added' % dataset_id
def save_harvest_result(provider_id, result):
    """Write the timestamped harvest summary for *provider_id* to the shared
    status directory (consumed by the status web page)."""
    result_dump = json.dumps({'timestamp': time.strftime('%Y-%m-%d %H:%M', time.localtime()),
                              'result': result})
    dump_file = join(expanduser('~'), 'shared', 'data-json-status', 'harvest', provider_id)
    with codecs.open(dump_file, 'w', 'utf-8') as f:
        f.write(result_dump)
def process_provider(provider_id, provider_url):
    """Harvest one provider: fetch its data.json, validate it, then add /
    modify / delete CKAN datasets so they match, keeping a history on disk.
    """
    print('%s ==> %s' % (provider_id, provider_url))
    # Per-provider history layout: timeline/ (raw snapshots) and
    # last-dataset/ (one cached JSON file per dataset identifier).
    provider_history_dir = join(history_dir, provider_id)
    create_missing_dir(provider_history_dir)
    provider_timeline_dir = join(provider_history_dir, 'timeline')
    create_missing_dir(provider_timeline_dir)
    provider_last_dataset_dir = join(provider_history_dir, 'last-dataset')
    create_missing_dir(provider_last_dataset_dir)
    result = []
    try:
        request = requests.get(provider_url)
        current_json = request.json()
        # Non-empty result means validation errors -> harvesting is skipped.
        result = validate_data_json.validate(current_json)
    except Exception as error:
        result.append(error.args[0])
    if not result:
        dataset_count = len(current_json)
        resource_count = 0
        for dataset in current_json:
            resource_count = resource_count + len(dataset['resources'])
    else:
        dataset_count = resource_count = 0
    # Publish the validation status regardless of outcome.
    result_dump = json.dumps({'timestamp': time.strftime('%Y-%m-%d %H:%M', time.localtime()),
                              'result': result,
                              'dataset_count': dataset_count,
                              'resource_count': resource_count})
    print(result_dump)
    dump_file = join(expanduser('~'), 'shared', 'data-json-status', 'check', provider_id)
    with codecs.open(dump_file, 'w', 'utf-8') as f:
        f.write(result_dump)
    save_harvest_result(provider_id, [])
    if not result:
        provider_last_json_file = join(provider_history_dir, 'last.json')
        if check_last_json(current_json, provider_last_json_file) == 0:
            print('no change')
            save_harvest_result(provider_id, ['NO CHANGE SINCE LAST HARVEST, SKIPPED'])
            return
        # Keep a raw snapshot of every harvested feed version.
        timestamped_file = join(provider_timeline_dir, time.strftime('%Y%m%d-%H%M.json', time.localtime()))
        with codecs.open(timestamped_file, 'w', 'utf-8') as f:
            f.write(request.text)
        # Cached dataset files from the previous run; entries still present
        # in the feed are removed from this list as they are processed.
        last_dataset_list = [ f for f in listdir(provider_last_dataset_dir)
                              if isfile(join(provider_last_dataset_dir, f)) ]
        result = []
        for dataset in current_json:
            internal_id = dataset['identifier']
            if internal_id in last_dataset_list:
                last_dataset_list.remove(internal_id)
            dataset_result = process_dataset(provider_id, dataset, provider_last_dataset_dir)
            if dataset_result is not None:
                result.append(dataset_result)
        # remove extra datasets
        for internal_id in last_dataset_list:
            dataset_id = '%s-%s' % (provider_id, internal_id)
            delete_dataset(dataset_id)
            remove(join(provider_last_dataset_dir, internal_id))
            result.append('dataset "%s" deleted' % dataset_id)
            print('dataset "%s" deleted' % dataset_id)
        save_harvest_result(provider_id, result)
    else:
        print('validation failed, harvesting skipped')
        save_harvest_result(provider_id, ['VALIDATION FAILED, SKIPPED'])
def update_ckan_stats():
    """Regenerate the Drupal-side HTML snippets with current CKAN statistics.

    Writes, per language, a dataset/provider/category count summary and a
    list of the most recently modified datasets into the shared Drupal dir.

    BUG FIX: both codecs.open calls used `'w' 'utf-8'` -- adjacent string
    literals concatenate, so codecs.open received the single mode string
    "wutf-8" and NO encoding, i.e. it behaved like a plain binary open.
    The calls now pass mode and encoding separately, and the writes pass
    unicode (no manual .encode) so the utf-8 StreamWriter does the encoding;
    the bytes written to disk are identical.
    """
    drupal_shared_dir = '/usr/lib/ckan/drupal'
    latest_dataset_count = 5
    dataset_count = len(ckan['en'].action.package_list())
    provider_count = len(ckan['en'].action.organization_list())
    category_count = len(ckan['en'].action.group_list())
    status_string = {}
    status_string['en'] = u'<ul><li><a href="/en/data/dataset">%s</a> datasets</li><li><a href="/en/data/provider">%s</a> data providers</li><li><a href="/en/data/category">%s</a> data categories</li></ul>'
    status_string['tc'] = u'<ul><li><a href="/tc/data/dataset">%s</a>\u500b\u6578\u64da\u96c6</li><li><a href="/tc/data/provider">%s</a>\u500b\u6578\u64da\u63d0\u4f9b\u6a5f\u69cb</li><li><a href="/tc/data/category">%s</a>\u500b\u6578\u64da\u5206\u985e</li></ul>'
    status_string['sc'] = u'<ul><li><a href="/sc/data/dataset">%s</a>\u4e2a\u6570\u636e\u96c6</li><li><a href="/sc/data/provider">%s</a>\u4e2a\u6570\u636e\u63d0\u4f9b\u673a\u6784</li><li><a href="/sc/data/category">%s</a>\u4e2a\u6570\u636e\u5206\u7c7b</li></ul>'
    for lang in lang_list:
        with codecs.open('%s/ckan_status_%s.html' % (drupal_shared_dir, lang), 'w', 'utf-8') as f:
            f.write(status_string[lang] % (dataset_count, provider_count, category_count))
        latest_datasets = ckan[lang].action.package_search(sort='metadata_modified desc',
                                                           rows=latest_dataset_count)
        with codecs.open('%s/latest_datasets_%s.html' % (drupal_shared_dir, lang), 'w', 'utf-8') as f:
            f.write(u'<ul>')
            for dataset in latest_datasets['results']:
                f.write(u'<li><a href="/%s/data/dataset/%s">%s</a> (%s)</li>'
                        % (lang, dataset['name'],
                           dataset['title'],
                           dataset['metadata_modified'][:10]))
            f.write(u'</ul>')
def main():
    """Entry point: read config, connect to each language-specific CKAN site,
    harvest every configured provider, then refresh the stats snippets."""
    # These are read by most other functions in this module.
    global history_dir, config, ckan, lang_list
    base_dir = dirname(dirname(realpath(__file__)))
    history_dir = join(base_dir, 'history')
    config_file = join(expanduser('~'), 'shared', 'config', 'config.ini')
    config = ConfigParser.ConfigParser()
    config.read(config_file)
    # [ProviderUrl] maps provider ids to data.json URLs; [ApiKey] maps
    # language codes to CKAN API keys.
    provider_urls = config_section_map('ProviderUrl')
    api_keys = config_section_map('ApiKey')
    lang_list = ['en', 'tc', 'sc']
    ckan = {}
    for lang in lang_list:
        ckan[lang] = ckanapi.RemoteCKAN('http://localhost/%s/data/' % lang, apikey = api_keys[lang]);
    for provider_id, provider_url in provider_urls.items():
        process_provider(provider_id, provider_url)
    update_ckan_stats();
if __name__ == '__main__':
    main()
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Auto-generated schema migration: make Opname.waarde_a optional
    (CharField(max_length=30) now accepts NULL and blank values)."""

    dependencies = [
        ('lizard_efcis', '0003_auto_20150209_1133'),
    ]

    operations = [
        migrations.AlterField(
            model_name='opname',
            name='waarde_a',
            field=models.CharField(max_length=30, null=True, blank=True),
            preserve_default=True,
        ),
    ]
|
"""
Copyright 2007-2011, 2016q Free Software Foundation, Inc.
This file is part of GNU Radio
SPDX-License-Identifier: GPL-2.0-or-later
"""
from __future__ import absolute_import
import ast
import functools
import random
from distutils.spawn import find_executable
from itertools import count
import six
from gi.repository import GLib, Gtk
from six.moves import filter
from . import colors
from .drawable import Drawable
from .connection import DummyConnection
from .. import Actions, Constants, Utils, Bars, Dialogs, MainWindow
from ..external_editor import ExternalEditor
from ...core import Messages
from ...core.FlowGraph import FlowGraph as CoreFlowgraph
class _ContextMenu(object):
    """
    Help with drawing the right click context menu
    """

    def __init__(self, main_window):
        self._menu = Gtk.Menu.new_from_model(Bars.ContextMenu())
        self._menu.attach_to_widget(main_window)

        # In GTK 3.22 Menu.popup was deprecated, we want to popup at the
        # pointer, so use that new function instead if we can.
        # Gtk.check_version returns None when the running GTK is at least the
        # given version; the instance attribute then shadows the popup()
        # method defined below.
        if Gtk.check_version(3, 22, 0) is None:
            self.popup = self._menu.popup_at_pointer

    def popup(self, event):
        # Fallback for GTK < 3.22: position the menu from the button event.
        self._menu.popup(None, None, None, None, event.button, event.time)
class FlowGraph(CoreFlowgraph, Drawable):
"""
FlowGraph is the data structure to store graphical signal blocks,
graphical inputs and outputs,
and the connections between inputs and outputs.
"""
    def __init__(self, parent, **kwargs):
        """
        FlowGraph constructor.

        Create a list for signal blocks and connections. Connect mouse handlers.

        Args:
            parent: passed through to the core FlowGraph constructor.
        """
        super(self.__class__, self).__init__(parent, **kwargs)
        Drawable.__init__(self)

        # We need to get the main window object so the context menu can be to the
        # registered actions
        app = Gtk.Application.get_default()
        main_window = None
        for window in app.get_windows():
            if isinstance(window, MainWindow.MainWindow):
                main_window = window
                break

        # Set later by the owning drawing area widget.
        self.drawing_area = None
        # important vars dealing with mouse event tracking
        self.element_moved = False
        self.mouse_pressed = False
        self.press_coor = (0, 0)
        # selected
        self.selected_elements = set()
        self._old_selected_port = None
        self._new_selected_port = None
        # current mouse hover element
        self.element_under_mouse = None
        # context menu
        self._context_menu = _ContextMenu(main_window)
        self.get_context_menu = lambda: self._context_menu

        # In-progress connection being dragged by the user, if any.
        self._new_connection = None
        self._elements_to_draw = []
        # (block name, param key) -> ExternalEditor instances, see
        # install_external_editor below.
        self._external_updaters = {}
def _get_unique_id(self, base_id=""):
"""
Get a unique id starting with the base id.
Args:
base_id: the id starts with this and appends a count
Returns:
a unique id
"""
block_ids = set(b.name for b in self.blocks)
for index in count():
block_id = "{}_{}".format(base_id, index)
if block_id not in block_ids:
break
return block_id
    def install_external_editor(self, param, parent=None):
        """Open (or reuse) an external text editor for editing *param*.

        One ExternalEditor process is kept per (block name, param key); edits
        are delivered back on the GTK main loop via GLib.idle_add.
        """
        target = (param.parent_block.name, param.key)

        if target in self._external_updaters:
            # An editor for this param is already running; reuse it.
            editor = self._external_updaters[target]
        else:
            config = self.parent_platform.config
            editor = find_executable(config.editor) or Dialogs.choose_editor(
                parent, config
            )  # todo: pass in parent
            if not editor:
                return
            updater = functools.partial(
                self.handle_external_editor_change, target=target
            )
            editor = self._external_updaters[target] = ExternalEditor(
                editor=editor,
                name=target[0],
                value=param.get_value(),
                callback=functools.partial(GLib.idle_add, updater),
            )
            editor.start()
        try:
            editor.open_editor()
        except Exception as e:
            # Problem launching the editor. Need to select a new editor.
            # NOTE(review): the exception `e` is not logged and the failed
            # updater stays registered in _external_updaters -- confirm intended.
            Messages.send(
                ">>> Error opening an external editor. Please select a different editor.\n"
            )
            # Reset the editor to force the user to select a new one.
            self.parent_platform.config.editor = ""
    def handle_external_editor_change(self, new_value, target):
        """Apply an edited value coming back from an external editor.

        Args:
            new_value: the new param value text.
            target: (block name, param key) tuple identifying the param.
        """
        try:
            block_id, param_key = target
            self.get_block(block_id).params[param_key].set_value(new_value)

        except (IndexError, ValueError):  # block no longer exists
            # Shut down and forget the orphaned editor process.
            self._external_updaters[target].stop()
            del self._external_updaters[target]
            return

        Actions.EXTERNAL_UPDATE()
def add_new_block(self, key, coor=None):
"""
Add a block of the given key to this flow graph.
Args:
key: the block key
coor: an optional coordinate or None for random
"""
id = self._get_unique_id(key)
scroll_pane = self.drawing_area.get_parent().get_parent()
# calculate the position coordinate
h_adj = scroll_pane.get_hadjustment()
v_adj = scroll_pane.get_vadjustment()
if coor is None:
coor = (
int(
random.uniform(0.25, 0.75) * h_adj.get_page_size()
+ h_adj.get_value()
),
int(
random.uniform(0.25, 0.75) * v_adj.get_page_size()
+ v_adj.get_value()
),
)
# get the new block
block = self.new_block(key)
block.coordinate = coor
block.params["id"].set_value(id)
Actions.ELEMENT_CREATE()
return id
    def make_connection(self):
        """this selection and the last were ports, try to connect them

        Returns:
            True if a connection was attempted (successfully or not),
            False if there were not two ports to connect.
        """
        # If a dragged connection reached a real sink, use its endpoints.
        if self._new_connection and self._new_connection.has_real_sink:
            self._old_selected_port = self._new_connection.source_port
            self._new_selected_port = self._new_connection.sink_port
        if self._old_selected_port and self._new_selected_port:
            try:
                self.connect(self._old_selected_port, self._new_selected_port)
                Actions.ELEMENT_CREATE()
            except Exception as e:
                # Invalid connection (type/direction mismatch etc.); report it.
                Messages.send_fail_connection(e)
            self._old_selected_port = None
            self._new_selected_port = None
            return True
        return False
    def update(self):
        """
        Call the top level rewrite and validate.
        Call the top level create labels and shapes.

        Order matters: rewrite/validate first, then recompute what is drawn
        and finally regenerate labels and shapes from the updated state.
        """
        self.rewrite()
        self.validate()
        self.update_elements_to_draw()
        self.create_labels()
        self.create_shapes()
def reload(self):
    """
    Reload flow-graph (with updated blocks).

    Exports the current graph and re-imports it, so block definitions
    are refreshed.

    Returns:
        False if some error occurred during import
    """
    success = False
    data = self.export_data()
    if data:
        # drop the selection: the selected objects are about to be rebuilt
        self.unselect()
        success = self.import_data(data)
        self.update()
    return success
###########################################################################
# Copy Paste
###########################################################################
def copy_to_clipboard(self):
    """
    Copy the selected blocks and connections into the clipboard.

    Returns:
        the clipboard tuple ((x_min, y_min), blocks, connections),
        or None when no blocks are selected
    """
    selection = list(self.selected_blocks())
    if not selection:
        return None
    # top-left corner of the selection's bounding box
    x_min = min(blk.coordinate[0] for blk in selection)
    y_min = min(blk.coordinate[1] for blk in selection)
    # keep only connections whose both endpoints are within the selection
    internal_connections = [
        con for con in self.connections
        if con.source_block in selection and con.sink_block in selection
    ]
    return (
        (x_min, y_min),
        [blk.export_data() for blk in selection],
        [con.export_data() for con in internal_connections],
    )
def paste_from_clipboard(self, clipboard):
    """
    Paste the blocks and connections from the clipboard.

    Args:
        clipboard: the nested data of ((x_min, y_min), blocks, connections)
    """
    # todo: rewrite this...
    selected = set()
    (x_min, y_min), blocks_n, connections_n = clipboard
    old_id2block = dict()
    # recalc the position: shift the pasted group into the visible scroll area
    scroll_pane = self.drawing_area.get_parent().get_parent()
    h_adj = scroll_pane.get_hadjustment()
    v_adj = scroll_pane.get_vadjustment()
    x_off = h_adj.get_value() - x_min + h_adj.get_page_size() / 4
    y_off = v_adj.get_value() - y_min + v_adj.get_page_size() / 4
    # an (almost) empty flow graph keeps the original coordinates
    if len(self.get_elements()) <= 1:
        x_off, y_off = 0, 0
    # create blocks
    for block_n in blocks_n:
        block_key = block_n.get("id")
        if block_key == "options":
            continue  # never paste the singleton options block
        block_name = block_n.get("name")
        # Verify whether a block with this name exists before adding it
        if block_name in (blk.name for blk in self.blocks):
            block_name = self._get_unique_id(block_name)
        block_n["name"] = block_name
        block = self.new_block(block_key)
        if not block:
            continue  # unknown block was pasted (e.g. dummy block)
        selected.add(block)
        block.import_data(**block_n)
        # remember the pasted id so connections can be rewired below
        old_id2block[block.params["id"].value] = block
        # move block to offset coordinate
        block.move((x_off, y_off))
        # TODO: prevent block from being pasted directly on top of another block
    # update before creating connections
    self.update()
    # create connections; connection_n is (src_id, src_key, sink_id, sink_key)
    for connection_n in connections_n:
        source = old_id2block[connection_n[0]].get_source(connection_n[1])
        sink = old_id2block[connection_n[2]].get_sink(connection_n[3])
        connection = self.connect(source, sink)
        selected.add(connection)
    self.selected_elements = selected
###########################################################################
# Modify Selected
###########################################################################
def type_controller_modify_selected(self, direction):
    """
    Change the registered type controller for the selected signal blocks.

    Args:
        direction: +1 or -1

    Returns:
        true for change
    """
    # stop at the first block that reports a change (mirrors any()'s
    # short-circuit behaviour)
    for block in self.selected_blocks():
        if block.type_controller_modify(direction):
            return True
    return False
def port_controller_modify_selected(self, direction):
    """
    Change port controller for the selected signal blocks.

    Args:
        direction: +1 or -1

    Returns:
        true for changed
    """
    # stop at the first block that reports a change (mirrors any()'s
    # short-circuit behaviour)
    for block in self.selected_blocks():
        if block.port_controller_modify(direction):
            return True
    return False
def change_state_selected(self, new_state):
    """
    Enable/disable the selected blocks.

    Args:
        new_state: a block state

    Returns:
        true if changed
    """
    any_changed = False
    for blk in self.selected_blocks():
        # record whether this assignment actually changes something
        if blk.state != new_state:
            any_changed = True
        blk.state = new_state
    return any_changed
def move_selected(self, delta_coordinate):
    """
    Move the selected blocks by the change in coordinates.

    Args:
        delta_coordinate: the change in coordinates (dx, dy)
    """
    for selected_block in self.selected_blocks():
        selected_block.move(delta_coordinate)
        # flag the drag so the mouse-release handler fires BLOCK_MOVE
        self.element_moved = True
def align_selected(self, calling_action=None):
    """
    Align the selected blocks.

    Args:
        calling_action: the action initiating the alignment

    Returns:
        True if changed, otherwise False
    """
    blocks = list(self.selected_blocks())
    if calling_action is None or not blocks:
        return False
    # compute common boundary of selected objects
    min_x, min_y = max_x, max_y = blocks[0].coordinate
    for selected_block in blocks:
        x, y = selected_block.coordinate
        min_x, min_y = min(min_x, x), min(min_y, y)
        x += selected_block.width
        y += selected_block.height
        max_x, max_y = max(max_x, x), max(max_y, y)
    ctr_x, ctr_y = (max_x + min_x) / 2, (max_y + min_y) / 2
    # align the blocks as requested: each transform maps (x, y, w, h)
    # to the block's new (x, y)
    # NOTE(review): the fallback lambda returns the full 4-tuple rather than
    # an (x, y) pair -- presumably unreachable for the actions wired here;
    # verify before relying on it.
    transform = {
        Actions.BLOCK_VALIGN_TOP: lambda x, y, w, h: (x, min_y),
        Actions.BLOCK_VALIGN_MIDDLE: lambda x, y, w, h: (x, ctr_y - h / 2),
        Actions.BLOCK_VALIGN_BOTTOM: lambda x, y, w, h: (x, max_y - h),
        Actions.BLOCK_HALIGN_LEFT: lambda x, y, w, h: (min_x, y),
        Actions.BLOCK_HALIGN_CENTER: lambda x, y, w, h: (ctr_x - w / 2, y),
        Actions.BLOCK_HALIGN_RIGHT: lambda x, y, w, h: (max_x - w, y),
    }.get(calling_action, lambda *args: args)
    for selected_block in blocks:
        x, y = selected_block.coordinate
        w, h = selected_block.width, selected_block.height
        selected_block.coordinate = transform(x, y, w, h)
    return True
def rotate_selected(self, rotation):
    """
    Rotate the selected blocks by multiples of 90 degrees.

    Args:
        rotation: the rotation in degrees

    Returns:
        true if changed, otherwise false.
    """
    if not any(self.selected_blocks()):
        return False
    # initialize min and max coordinates
    min_x, min_y = max_x, max_y = self.selected_block.coordinate
    # rotate each selected block, and find min/max coordinate
    for selected_block in self.selected_blocks():
        selected_block.rotate(rotation)
        # update the min/max coordinate
        x, y = selected_block.coordinate
        min_x, min_y = min(min_x, x), min(min_y, y)
        max_x, max_y = max(max_x, x), max(max_y, y)
    # calculate center point of selected blocks
    ctr_x, ctr_y = (max_x + min_x) / 2, (max_y + min_y) / 2
    # rotate the blocks around the center point so the group stays in place
    for selected_block in self.selected_blocks():
        x, y = selected_block.coordinate
        x, y = Utils.get_rotated_coordinate((x - ctr_x, y - ctr_y), rotation)
        selected_block.coordinate = (x + ctr_x, y + ctr_y)
    return True
def remove_selected(self):
    """
    Remove selected elements.

    Returns:
        true if changed.
    """
    changed = False
    # NOTE(review): iterates selected_elements directly -- assumes
    # remove_element() does not mutate that set during iteration; verify.
    for selected_element in self.selected_elements:
        self.remove_element(selected_element)
        changed = True
    return changed
def update_selected(self):
    """
    Remove deleted elements from the selected elements list.
    Update highlighting so only the selected are highlighted.
    """
    selected_elements = self.selected_elements
    elements = self.get_elements()
    # remove deleted elements (iterate a copy: the set is mutated)
    for selected in list(selected_elements):
        if selected in elements:
            continue
        selected_elements.remove(selected)
    # drop pending port references whose parent element was deleted
    if self._old_selected_port and self._old_selected_port.parent not in elements:
        self._old_selected_port = None
    if self._new_selected_port and self._new_selected_port.parent not in elements:
        self._new_selected_port = None
    # update highlighting
    for element in elements:
        element.highlighted = element in selected_elements
###########################################################################
# Draw stuff
###########################################################################
def update_elements_to_draw(self):
    """Rebuild the ordered list of elements that should be drawn."""
    hide_disabled_blocks = Actions.TOGGLE_HIDE_DISABLED_BLOCKS.get_active()
    hide_variables = Actions.TOGGLE_HIDE_VARIABLES.get_active()

    def draw_order(elem):
        # sort key: highlighted / block / enabled elements sort last,
        # so they are drawn last and appear on top
        return elem.highlighted, elem.is_block, elem.enabled

    elements = sorted(self.get_elements(), key=draw_order)
    del self._elements_to_draw[:]
    for element in elements:
        if hide_disabled_blocks and not element.enabled:
            continue  # skip hidden disabled blocks and connections
        if hide_variables and (element.is_variable or element.is_import):
            continue  # skip hidden variable and import blocks
        self._elements_to_draw.append(element)
def create_labels(self, cr=None):
    """Create labels for every drawable element (cr: optional cairo context)."""
    for element in self._elements_to_draw:
        element.create_labels(cr)
def create_shapes(self):
    """Create shapes for every drawable element, blocks first."""
    # TODO - this is a workaround for bus ports not having a proper coordinate
    # until the shape is drawn. The workaround is to draw blocks before connections
    for element in filter(lambda x: x.is_block, self._elements_to_draw):
        element.create_shapes()
    for element in filter(lambda x: not x.is_block, self._elements_to_draw):
        element.create_shapes()
def _drawables(self):
    """Yield draw callables in paint order: comments, pending connection, elements."""
    # todo: cache that
    show_comments = Actions.TOGGLE_SHOW_BLOCK_COMMENTS.get_active()
    # comments first so they render underneath everything else
    for element in self._elements_to_draw:
        if element.is_block and show_comments and element.enabled:
            yield element.draw_comment
    # a connection being dragged is drawn under the finished elements
    if self._new_connection is not None:
        yield self._new_connection.draw
    for element in self._elements_to_draw:
        yield element.draw
def draw(self, cr):
    """Draw blocks connections comment and select rectangle"""
    for draw_element in self._drawables():
        # isolate each element's cairo state changes
        cr.save()
        draw_element(cr)
        cr.restore()
    # draw a rubber-band rectangle while multi-selecting with the mouse
    draw_multi_select_rectangle = (
        self.mouse_pressed
        and (not self.selected_elements or self.drawing_area.ctrl_mask)
        and not self._new_connection
    )
    if draw_multi_select_rectangle:
        x1, y1 = self.press_coor
        x2, y2 = self.coordinate
        x, y = int(min(x1, x2)), int(min(y1, y2))
        w, h = int(abs(x1 - x2)), int(abs(y1 - y2))
        cr.set_source_rgba(
            colors.HIGHLIGHT_COLOR[0],
            colors.HIGHLIGHT_COLOR[1],
            colors.HIGHLIGHT_COLOR[2],
            0.5,
        )
        # translucent fill, then a solid outline in the same color
        cr.rectangle(x, y, w, h)
        cr.fill()
        cr.rectangle(x, y, w, h)
        cr.stroke()
##########################################################################
# selection handling
##########################################################################
def update_selected_elements(self):
    """
    Update the selected elements.
    The update behavior depends on the state of the mouse button.
    When the mouse button pressed the selection will change when
    the control mask is set or the new selection is not in the current group.
    When the mouse button is released the selection will change when
    the mouse has moved and the control mask is set or the current group is empty.
    Attempt to make a new connection if the old and new ports are filled.
    If the control mask is set, merge with the current elements.
    """
    selected_elements = None
    if self.mouse_pressed:
        new_selections = self.what_is_selected(self.coordinate)
        # update the selections if the new selection is not in the current selections
        # allows us to move entire selected groups of elements
        if not new_selections:
            selected_elements = set()
        elif self.drawing_area.ctrl_mask or self.selected_elements.isdisjoint(
            new_selections
        ):
            selected_elements = new_selections
        # toggle the forced port labels for the connection workflow
        if self._old_selected_port:
            self._old_selected_port.force_show_label = False
            self.create_shapes()
            self.drawing_area.queue_draw()
        elif self._new_selected_port:
            self._new_selected_port.force_show_label = True
    else:  # called from a mouse release
        if (
            not self.element_moved
            and (not self.selected_elements or self.drawing_area.ctrl_mask)
            and not self._new_connection
        ):
            # rubber-band (area) selection between press and release points
            selected_elements = self.what_is_selected(
                self.coordinate, self.press_coor
            )
    # this selection and the last were ports, try to connect them
    if self.make_connection():
        return
    # update selected elements
    if selected_elements is None:
        return
    # if ctrl, set the selected elements to the union - intersection of old and new
    if self.drawing_area.ctrl_mask:
        self.selected_elements ^= selected_elements
    else:
        self.selected_elements.clear()
        self.selected_elements.update(selected_elements)
    Actions.ELEMENT_SELECT()
def what_is_selected(self, coor, coor_m=None):
    """
    What is selected?
    At the given coordinate, return the elements found to be selected.
    If coor_m is unspecified, return only the first element found to be selected:
    Iterate through the elements backwards since top elements are at the end of the list.
    If an element is selected, place it at the end of the list so that it is drawn last,
    and hence on top. Update the selected port information.

    Args:
        coor: the coordinate of the mouse click
        coor_m: the coordinate for multi select

    Returns:
        the set of selected blocks and connections (possibly empty)
    """
    selected_port = None
    selected = set()
    # check the elements (reversed so the topmost drawn element wins)
    for element in reversed(self._elements_to_draw):
        selected_element = element.what_is_selected(coor, coor_m)
        if not selected_element:
            continue
        # update the selected port information
        if selected_element.is_port:
            if not coor_m:
                selected_port = selected_element
            # select the port's parent block, not the port itself
            selected_element = selected_element.parent_block
        selected.add(selected_element)
        # single-point selection stops at the first (topmost) hit
        if not coor_m:
            break
    if selected_port and selected_port.is_source:
        # clicking a source port starts a new connection drag instead of
        # selecting the block
        selected.remove(selected_port.parent_block)
        self._new_connection = DummyConnection(selected_port, coordinate=coor)
        self.drawing_area.queue_draw()
    # update selected ports
    if selected_port is not self._new_selected_port:
        self._old_selected_port = self._new_selected_port
        self._new_selected_port = selected_port
    return selected
def unselect(self):
    """
    Set selected elements to an empty set.
    """
    self.selected_elements.clear()
def select_all(self):
    """Select all blocks in the flow graph"""
    self.selected_elements.clear()
    # everything currently drawable becomes selected, connections included
    self.selected_elements.update(self._elements_to_draw)
def selected_blocks(self):
    """
    Get a group of selected blocks.

    Returns:
        sub set of blocks in this flow graph, as a lazy iterator
    """
    return filter(lambda element: element.is_block, self.selected_elements)
@property
def selected_block(self):
    """
    Get the selected block when a block or port is selected.

    Returns:
        a block or None
    """
    # first selected block wins; order within the selection set is arbitrary
    return next(self.selected_blocks(), None)
def get_selected_elements(self):
    """
    Get the group of selected elements.

    Returns:
        sub set of elements in this flow graph
    """
    return self.selected_elements
def get_selected_element(self):
    """
    Get the selected element.

    Returns:
        a block, port, or connection or None
    """
    # arbitrary member of the selection set (set order is not defined)
    return next(iter(self.selected_elements), None)
##########################################################################
# Event Handlers
##########################################################################
def handle_mouse_context_press(self, coordinate, event):
    """
    The context mouse button was pressed:
    If no elements were selected, perform re-selection at this coordinate.
    Then, show the context menu at the mouse click location.
    """
    selections = self.what_is_selected(coordinate)
    if not selections.intersection(self.selected_elements):
        # nothing under the cursor is currently selected: redo selection here
        self.coordinate = coordinate
        self.mouse_pressed = True
        self.update_selected_elements()
        self.mouse_pressed = False
    if self._new_connection:
        # a right-click cancels any connection drag in progress
        self._new_connection = None
        self.drawing_area.queue_draw()
    self._context_menu.popup(event)
def handle_mouse_selector_press(self, double_click, coordinate):
    """
    The selector mouse button was pressed:
    Find the selected element. Attempt a new connection if possible.
    Open the block params window on a double click.
    Update the selection state of the flow graph.
    """
    self.press_coor = coordinate
    self.coordinate = coordinate
    self.mouse_pressed = True
    if double_click:
        self.unselect()
    self.update_selected_elements()
    if double_click and self.selected_block:
        # double click on a block opens its properties dialog
        self.mouse_pressed = False
        Actions.BLOCK_PARAM_MODIFY()
def handle_mouse_selector_release(self, coordinate):
    """
    The selector mouse button was released:
    Update the state, handle motion (dragging).
    And update the selected flowgraph elements.
    """
    self.coordinate = coordinate
    self.mouse_pressed = False
    if self.element_moved:
        # a drag just finished: record it (e.g. for undo/redo)
        Actions.BLOCK_MOVE()
        self.element_moved = False
    self.update_selected_elements()
    if self._new_connection:
        # a pending connection drag ends on release, connected or not
        self._new_connection = None
        self.drawing_area.queue_draw()
def handle_mouse_motion(self, coordinate):
    """
    The mouse has moved, respond to mouse dragging or notify elements.
    Move a selected element to the new coordinate.
    Auto-scroll the scroll bars at the boundaries.
    """
    # to perform a movement, the mouse must be pressed
    # (no longer checking pending events via Gtk.events_pending() - always true in Windows)
    redraw = False
    if not self.mouse_pressed or self._new_connection:
        redraw = self._handle_mouse_motion_move(coordinate)
    if self.mouse_pressed:
        redraw = redraw or self._handle_mouse_motion_drag(coordinate)
    if redraw:
        self.drawing_area.queue_draw()
def _handle_mouse_motion_move(self, coordinate):
    """Track the element under the mouse and fire mouse_over/mouse_out.

    Returns:
        True if the drawing area needs a redraw, otherwise False.
    """
    # only continue if mouse-over stuff is enabled (just the auto-hide port label stuff for now)
    redraw = False
    for element in self._elements_to_draw:
        over_element = element.what_is_selected(coordinate)
        if not over_element:
            continue
        if over_element != self.element_under_mouse:  # over sth new
            if self.element_under_mouse:
                # mouse_out()/mouse_over() may return None; coerce to bool
                redraw |= self.element_under_mouse.mouse_out() or False
            self.element_under_mouse = over_element
            redraw |= over_element.mouse_over() or False
        break
    else:
        # loop fell through: the mouse is not over any element any more
        if self.element_under_mouse:
            redraw |= self.element_under_mouse.mouse_out() or False
            self.element_under_mouse = None
    if not Actions.TOGGLE_AUTO_HIDE_PORT_LABELS.get_active():
        # fix: previously this returned None implicitly; callers use the
        # result as a boolean redraw flag, so return False explicitly
        return False
    if redraw:
        # self.create_labels()
        self.create_shapes()
    return redraw
def _handle_mouse_motion_drag(self, coordinate):
    """Handle motion while the button is held: drag a pending connection
    or move the selected elements.

    Returns:
        True if a redraw is needed
    """
    redraw = False
    # remove the connection if selected in drag event
    if (
        len(self.selected_elements) == 1
        and self.get_selected_element().is_connection
    ):
        Actions.ELEMENT_DELETE()
        redraw = True
    if self._new_connection:
        e = self.element_under_mouse
        if e and e.is_port and e.is_sink:
            # snap the pending connection onto the sink port under the mouse
            self._new_connection.update(sink_port=self.element_under_mouse)
        else:
            # otherwise just follow the cursor
            self._new_connection.update(coordinate=coordinate, rotation=0)
        return True
    # move the selected elements and record the new coordinate
    x, y = coordinate
    if not self.drawing_area.ctrl_mask:
        X, Y = self.coordinate
        dX, dY = int(x - X), int(y - Y)
        # with snap-to-grid active, only move in whole grid steps
        active = (
            Actions.TOGGLE_SNAP_TO_GRID.get_active() or self.drawing_area.mod1_mask
        )
        if (
            not active
            or abs(dX) >= Constants.CANVAS_GRID_SIZE
            or abs(dY) >= Constants.CANVAS_GRID_SIZE
        ):
            self.move_selected((dX, dY))
            self.coordinate = (x, y)
            redraw = True
    return redraw
def get_extents(self):
    """Return the bounding box (x_min, y_min, x_max, y_max) of everything
    drawn, including block comments when they are shown."""
    show_comments = Actions.TOGGLE_SHOW_BLOCK_COMMENTS.get_active()
    # accumulate the extremes over every element's extents
    x_min = y_min = 10000000
    x_max = y_max = 0
    for element in self._elements_to_draw:
        x1, y1, x2, y2 = element.get_extents()
        x_min = min(x_min, x1)
        y_min = min(y_min, y1)
        x_max = max(x_max, x2)
        y_max = max(y_max, y2)
        if element.is_block and show_comments and element.enabled:
            cx1, cy1, cx2, cy2 = element.get_extents_comment()
            x_min = min(x_min, cx1)
            y_min = min(y_min, cy1)
            x_max = max(x_max, cx2)
            y_max = max(y_max, cy2)
    return (x_min, y_min, x_max, y_max)
|
from zope.interface import implementer
from ..interfaces import (
IResourceType,
)
from ..resources import (
ResourceTypeBase,
)
from ..lib.utils.common_utils import translate as _
@implementer(IResourceType)
class AssignsResource(ResourceTypeBase):
    """Resource type exposing a single 'assign' permission; scopes disabled."""

    # registration / traversal name of this resource type
    __name__ = 'assigns'

    @property
    def allowed_permisions(self):
        # NOTE(review): 'permisions' is misspelled, but external code may look
        # this attribute up by its exact name -- verify before renaming.
        return [
            ('assign', _(u'assign')),
        ]

    @property
    def allowed_scopes(self):
        # scoped permissions are not supported for this resource type
        return False
|
import numpy as np

# Homogeneous (4x4) axis-permutation matrix. For a column vector
# v = (x, y, z, 1), ``xyz2zxy @ v`` yields (y, z, x, 1); for a row vector,
# ``v @ xyz2zxy`` yields (z, x, y, 1). The name suggests the row-vector
# convention (xyz -> zxy) -- TODO confirm against callers.
xyz2zxy = np.array([[0., 1., 0., 0.],
                    [0., 0., 1., 0.],
                    [1., 0., 0., 0.],
                    [0., 0., 0., 1.]])
|
import json
import time
from charitybot2.configurations.event_configuration import EventConfiguration
from charitybot2.exceptions import IllegalArgumentException
from charitybot2.models.donation import Donation
from charitybot2.models.event import NonExistentEventException
from charitybot2.sources.url_call import UrlCall
from type_assertions import accept_types
class PrivateApiCalls:
    """Client wrapper for the charitybot2 private HTTP API (v1).

    Every method performs a blocking HTTP call via ``UrlCall`` and decodes
    the UTF-8 JSON body of the response.
    """

    def __init__(self, base_api_url, timeout=2):
        """
        Args:
            base_api_url: root URL of the service (must end with '/')
            timeout: per-request timeout in seconds
        """
        self._timeout = timeout
        self._base_api_url = base_api_url + 'api/v1/'

    def __validate_event_identifier(self, event_identifier):
        """Raise NonExistentEventException for malformed or unknown identifiers."""
        # identifiers with spaces can never exist server side; fail fast locally
        if ' ' in event_identifier:
            raise NonExistentEventException('Event identifiers cannot contain spaces')
        if not self.get_event_existence(identifier=event_identifier):
            raise NonExistentEventException('Event with identifier {} does not exist'.format(event_identifier))

    def get_index(self):
        """Return the decoded JSON index document of the API root."""
        return json.loads(UrlCall(url=self._base_api_url, timeout=self._timeout).get().content.decode('utf-8'))

    @accept_types(object, str)
    def get_event_existence(self, identifier):
        """Return True if an event with the given identifier exists."""
        url = self._base_api_url + 'event/exists/{}/'.format(identifier)
        decoded_content = UrlCall(url=url, timeout=self._timeout).get().content.decode('utf-8')
        return json.loads(decoded_content)['event_exists']

    @accept_types(object, str)
    def get_event_info(self, identifier):
        """Return the event info dict; raise if the event does not exist."""
        url = self._base_api_url + 'event/{}'.format(identifier)
        decoded_content = UrlCall(url=url, timeout=self._timeout).get().content.decode('utf-8')
        content = json.loads(decoded_content)
        # the API signals "not found" with a 'message' key in the body
        if 'message' in content.keys():
            raise NonExistentEventException('Event with identifier {} does not exist'.format(identifier))
        return content

    @accept_types(object, EventConfiguration)
    def register_event(self, event_configuration):
        """Register a new event; return True on success."""
        url = self._base_api_url + 'event/'
        response = UrlCall(url=url, timeout=self._timeout).post(data=event_configuration.configuration_values)
        decoded_content = response.content.decode('utf-8')
        return json.loads(decoded_content)['registration_successful']

    @accept_types(object, EventConfiguration)
    def update_event(self, event_configuration):
        """Update an existing event; return True on success."""
        url = self._base_api_url + 'event/'
        response = UrlCall(url=url, timeout=self._timeout).post(data=event_configuration.configuration_values)
        decoded_content = response.content.decode('utf-8')
        return json.loads(decoded_content)['update_successful']

    @accept_types(object, str, str, (int, float))
    def send_heartbeat(self, source, state, timestamp=None):
        """Send a heartbeat; return True when the service received it.

        Args:
            source: identifier of the heartbeat origin
            state: state string to report
            timestamp: epoch time of the heartbeat; defaults to now
        """
        if timestamp is None:
            timestamp = int(time.time())
        else:
            # Fix: float timestamps (explicitly allowed by the decorator) were
            # previously discarded and replaced with the current time.
            timestamp = int(timestamp)
        data = dict(state=state, source=source, timestamp=timestamp)
        url = self._base_api_url + 'heartbeat/'
        response = UrlCall(url=url, timeout=self._timeout).post(data=data)
        decoded_content = response.content.decode('utf-8')
        return json.loads(decoded_content)['received']

    # Disabled due to assertion check not working properly for this specific method
    # @accept_types(object, Donation)
    def register_donation(self, donation):
        """Register a donation; return True when the service received it."""
        url = self._base_api_url + 'donation/'
        response = UrlCall(url=url, timeout=self._timeout).post(data=donation.to_dict())
        decoded_content = response.content.decode('utf-8')
        return json.loads(decoded_content)['received']

    def get_event_donations(self, event_identifier, time_bounds=(), limit=None):
        """Return donations for an event, optionally time-bounded and limited.

        Args:
            event_identifier: event to query
            time_bounds: optional (lower, upper) pair of epoch-second ints
            limit: optional maximum number of donations (>= 1)

        Raises:
            IllegalArgumentException: on malformed time bounds or limit
            NonExistentEventException: if the event does not exist
        """
        self.__validate_event_identifier(event_identifier=event_identifier)
        params_added = False
        # TODO: Rewrite to properly use query parameters built into requests instead of manually concatenating
        url = self._base_api_url + 'event/{}/donations/'.format(event_identifier)
        if len(time_bounds) == 2:
            lower_bound, upper_bound = time_bounds[0], time_bounds[1]
            if not isinstance(lower_bound, int) or not isinstance(upper_bound, int):
                raise IllegalArgumentException('Time bounds must be a tuple of 2 integers')
            url += '?lower={}&upper={}'.format(lower_bound, upper_bound)
            params_added = True
        if limit is not None:
            if limit <= 0:
                raise IllegalArgumentException('Limit must be 1 or more')
            url += '&' if params_added else '?'
            url += 'limit={}'.format(limit)
            params_added = True
        response = UrlCall(url=url, timeout=self._timeout).get()
        decoded_content = response.content.decode('utf-8')
        converted_content = json.loads(decoded_content)['donations']
        # the endpoint returns either a list of donations or a single one
        if isinstance(converted_content, list):
            donations = [Donation.from_json(donation) for donation in converted_content]
        else:
            donations = Donation.from_json(converted_content)
        return donations

    def get_last_event_donation(self, event_identifier):
        """Return the most recent donation for the event."""
        return self.get_event_donations(event_identifier=event_identifier, limit=1)

    def get_event_total(self, event_identifier):
        """Return the running donation total for the event as a float."""
        self.__validate_event_identifier(event_identifier=event_identifier)
        url = self._base_api_url + 'event/{}/total/'.format(event_identifier)
        response = UrlCall(url=url, timeout=self._timeout).get()
        decoded_content = response.content.decode('utf-8')
        return float(json.loads(decoded_content)['total'])

    # TODO: Write tests for this method
    def update_event_total(self, event_identifier, total):
        """Set the running donation total; return True on success."""
        self.__validate_event_identifier(event_identifier=event_identifier)
        url = self._base_api_url + 'event/{}/total/'.format(event_identifier)
        response = UrlCall(url=url, timeout=self._timeout).post(data={'total': total})
        decoded_content = response.content.decode('utf-8')
        return json.loads(decoded_content)['update_successful']

    def get_latest_event_donation(self, event_identifier):
        """Return a single Donation from the 'donations/largest' endpoint.

        NOTE(review): the method name says 'latest' but the endpoint queried
        is 'largest' -- verify which one is intended before renaming either.
        """
        self.__validate_event_identifier(event_identifier=event_identifier)
        url = self._base_api_url + 'event/{}/donations/largest'.format(event_identifier)
        response = UrlCall(url=url, timeout=self._timeout).get()
        decoded_content = response.content.decode('utf-8')
        converted_content = json.loads(decoded_content)
        return Donation.from_dict(converted_content)

    def get_donation_count(self, event_identifier):
        """Return the total number of donations for the event."""
        self.__validate_event_identifier(event_identifier=event_identifier)
        url = self._base_api_url + 'event/{}/donations/count'.format(event_identifier)
        response = UrlCall(url=url, timeout=self._timeout).get()
        decoded_content = response.content.decode('utf-8')
        converted_content = json.loads(decoded_content)
        return int(converted_content['count'])

    def get_time_bound_donation_count(self, event_identifier, lower_time_bound, upper_time_bound):
        """Return the number of donations within [lower, upper] epoch seconds.

        Raises:
            IllegalArgumentException: if bounds are not ints, are negative,
                or upper is not strictly greater than lower
        """
        self.__validate_event_identifier(event_identifier=event_identifier)
        if not isinstance(lower_time_bound, int) or not isinstance(upper_time_bound, int):
            raise IllegalArgumentException('Time bounds must be integers')
        if not upper_time_bound > lower_time_bound or (lower_time_bound < 0 or upper_time_bound < 0):
            raise IllegalArgumentException('Time bounds must be positive integers with upper larger than lower')
        url = self._base_api_url + 'event/{}/donations/count?lower={}&upper={}'.format(
            event_identifier,
            lower_time_bound,
            upper_time_bound)
        response = UrlCall(url=url, timeout=self._timeout).get()
        decoded_content = response.content.decode('utf-8')
        converted_content = json.loads(decoded_content)
        return int(converted_content['count'])

    def get_average_donation_amount(self, event_identifier):
        """Return the average donation amount, rounded to 3 decimal places."""
        self.__validate_event_identifier(event_identifier=event_identifier)
        url = self._base_api_url + 'event/{}/donations/average'.format(
            event_identifier
        )
        response = UrlCall(url=url, timeout=self._timeout).get()
        decoded_content = response.content.decode('utf-8')
        converted_content = json.loads(decoded_content)
        return round(float(converted_content['average_donation_amount']), 3)

    def get_donation_distribution(self, event_identifier):
        """Return the donation distribution structure reported by the API."""
        self.__validate_event_identifier(event_identifier=event_identifier)
        url = self._base_api_url + 'event/{}/donations/distribution'.format(
            event_identifier
        )
        response = UrlCall(url=url, timeout=self._timeout).get()
        decoded_content = response.content.decode('utf-8')
        converted_content = json.loads(decoded_content)
        return converted_content['distribution']

    def get_all_events(self):
        """Return every event as a list of EventConfiguration objects."""
        url = self._base_api_url + 'events'
        response = UrlCall(url=url, timeout=self._timeout).get()
        decoded_content = response.content.decode('utf-8')
        converted_content = json.loads(decoded_content)
        return [
            EventConfiguration(configuration_values=config)
            for config
            in converted_content['events']
        ]
|
from optparse import OptionParser
from os.path import basename, splitext
from sys import exit, stdout
from lpod import __version__
from lpod.document import odf_new_document_from_type, odf_get_document
from lpod.toc import odf_create_toc
from lpod.vfs import vfs
from lpod.table import import_from_csv
def init_doc(mimetype):
    """Create an empty output document of the family matching mimetype."""
    # Text mode
    if mimetype == "application/vnd.oasis.opendocument.text":
        output_doc = odf_new_document_from_type("text")
        # Begin with a TOC (filled in after all content has been merged)
        output_body = output_doc.get_body()
        output_body.append_element(odf_create_toc())
    # Spreadsheet mode: CSV input also produces a spreadsheet document
    elif mimetype in ("application/vnd.oasis.opendocument.spreadsheet",
                      "text/csv"):
        output_doc = odf_new_document_from_type("spreadsheet")
    # Presentation mode
    else:
        output_doc = odf_new_document_from_type("presentation")
    return output_doc
def _add_pictures(document, output_doc):
    """Copy extra parts (images, ...) from document into output_doc."""
    # Copy extra parts (images...)
    container = document.container
    for partname in container.get_contents():
        if partname.startswith('Pictures/'):
            data = container.get_part(partname)
            # Suppose uniqueness: a same-named part is simply overwritten
            output_doc.container.set_part(partname, data)
def add_odt(filename, output_doc):
    """Append the body content of an ODT file to the output document."""
    document = odf_get_document(filename)
    # Copy content
    src_body = document.get_body()
    output_body = output_doc.get_body()
    for element in src_body.get_children():
        tagname = element.get_tagname()
        # Skip TOC, etc. -- the merged document gets its own TOC
        if tagname in ('text:sequence-decls', 'text:table-of-content'):
            continue
        # Copy the rest recursively
        output_body.append_element(element.clone())
    # Add pictures/
    _add_pictures(document, output_doc)
    # TODO embedded objects
    print 'Add "%s"' % filename
def _get_table_name(name, output_body):
    """Return a table name unique within output_body, suffixing '_N' on clash."""
    if isinstance(name, str):
        # decode byte strings with the console encoding (Python 2 semantics)
        encoding = stdout.encoding or 'utf8'
        name = unicode(name, encoding)
    already_names = set([ table.get_table_name()
                          for table in output_body.get_table_list() ])
    if name in already_names:
        # probe name_1, name_2, ... until an unused name is found
        i = 1
        while True:
            new_name = u"%s_%d" % (name, i)
            if new_name not in already_names:
                return new_name
            i += 1
    else:
        return name
def add_ods(filename, output_doc):
    """Append the sheets of an ODS file to the output document."""
    document = odf_get_document(filename)
    # Add the sheets
    output_body = output_doc.get_body()
    ods_body = document.get_body()
    for table in ods_body.get_table_list():
        name = table.get_table_name()
        # rename on clash so every sheet keeps a unique name
        name = _get_table_name(name, output_body)
        table.set_table_name(name)
        output_body.append_element(table)
    # Add pictures/
    _add_pictures(document, output_doc)
    print 'Add "%s"' % filename
def add_csv(filename, output_doc):
    """Import a CSV file as a new sheet of the output document."""
    output_body = output_doc.get_body()
    # Make the name from the file name (without extension)
    name = splitext(basename(filename))[0]
    name = _get_table_name(name, output_body)
    table = import_from_csv(filename, name)
    output_body.append_element(table)
    print 'Add "%s"' % filename
def add_odp(filename, output_doc):
    """Append the draw pages of an ODP file to the output document."""
    document = odf_get_document(filename)
    # Add the pages
    output_body = output_doc.get_body()
    already_names = set([ page.get_page_name()
                          for page in output_body.get_draw_page_list() ])
    odp_body = document.get_body()
    for page in odp_body.get_draw_page_list():
        name = page.get_page_name()
        if name in already_names:
            # rename on clash so every page keeps a unique name
            i = 1
            while True:
                new_name = u"%s_%d" % (name, i)
                if new_name not in already_names:
                    name = new_name
                    break
                i += 1
        page.set_page_name(name)
        already_names.add(name)
        output_body.append_element(page)
    # Add pictures/
    _add_pictures(document, output_doc)
    print 'Add "%s"' % filename
if __name__ == '__main__':
    # Options initialisation
    usage = "%prog <file1> [<file2> ...]"
    description = "Merge all input files in an unique OpenDocument file"
    parser = OptionParser(usage, version=__version__,
                          description=description)
    # --output
    parser.add_option('-o', '--output', action='store', type='string',
                      dest='output', metavar='FILE', default=None,
                      help="Place output in file FILE (out.od[t|s|p] by default)")
    # Parse !
    opts, filenames = parser.parse_args()
    # Arguments
    if not filenames:
        parser.print_help()
        exit(1)
    output_filename = opts.output
    output_doc = None
    # Concatenate content in the output doc
    for filename in filenames:
        # Exists ?
        if not vfs.exists(filename):
            print "Skip", filename, "not existing"
            continue
        # A good file => only text, spreadsheet, CSV and presentation
        mimetype = vfs.get_mimetype(filename)
        if mimetype not in ("application/vnd.oasis.opendocument.text",
                            "application/vnd.oasis.opendocument.spreadsheet",
                            "text/csv",
                            "application/vnd.oasis.opendocument.presentation"):
            print 'Skip "%s" with mimetype "%s"' % (filename, mimetype)
            continue
        # Not yet an output_doc ?
        if output_doc is None:
            # Create an empty doc of the same family as the first usable input
            output_doc = init_doc(mimetype)
            output_mimetype = output_doc.get_type()
            print '%s documents detected' % output_mimetype.title()
            # Make the filename
            if output_filename is None:
                output_filename = "out.od%s" % output_mimetype[0]
            if vfs.exists(output_filename):
                vfs.remove(output_filename)
        # Add a text doc
        if mimetype == "application/vnd.oasis.opendocument.text":
            if output_mimetype != "text":
                print "We cannot merge a mix of text/spreadsheet/presentation!"
                exit(1)
            add_odt(filename, output_doc)
        # Add a spreadsheet doc
        elif mimetype in ("application/vnd.oasis.opendocument.spreadsheet",
                          "text/csv"):
            if output_mimetype != "spreadsheet":
                print "We cannot merge a mix of text/spreadsheet/presentation!"
                exit(1)
            # CSV ?
            if mimetype == "text/csv":
                add_csv(filename, output_doc)
            else:
                add_ods(filename, output_doc)
        # Add a presentation doc
        else:
            if output_mimetype != "presentation":
                print "We cannot merge a mix of text/spreadsheet/presentation!"
                exit(1)
            add_odp(filename, output_doc)
    # Extra for odt: fill the TOC that init_doc created
    # NOTE(review): indentation was reconstructed here; output_mimetype is
    # unbound when every input file was skipped -- verify placement against
    # the original script.
    if output_mimetype == 'text':
        output_body = output_doc.get_body()
        toc = output_body.get_toc()
        toc.toc_fill()
    # Save
    if output_doc is not None:
        output_doc.save(output_filename, pretty=True)
        print 'Document "%s" generated' % output_filename
    else:
        print "Nothing to save, ..."
|
"""
Test CF-NetCDF file loading and saving.
"""
from __future__ import (absolute_import, division, print_function)
from six.moves import (filter, input, map, range, zip) # noqa
import six
import iris.tests as tests
import os
import os.path
import shutil
import stat
import tempfile
import netCDF4 as nc
import numpy as np
import numpy.ma as ma
import iris
import iris.analysis.trajectory
import iris.fileformats._pyke_rules.compiled_krb.fc_rules_cf_fc as pyke_rules
import iris.fileformats.netcdf
import iris.std_names
import iris.util
import iris.coord_systems as icoord_systems
from iris.tests import mock
import iris.tests.stock as stock
from iris._lazy_data import is_lazy_data
@tests.skip_data
class TestNetCDFLoad(tests.IrisTest):
    """Loading tests covering a variety of CF-netCDF sample files.

    Each test loads a sample file from the iris test-data tree and checks
    the resulting cube(s) against a stored CML reference.
    """
    def test_monotonic(self):
        # Load a file with a monotonic coordinate; sort cubes by var_name
        # so the CML comparison is order-independent.
        cubes = iris.load(tests.get_data_path(
            ('NetCDF', 'testing', 'test_monotonic_coordinate.nc')))
        cubes = sorted(cubes, key=lambda cube: cube.var_name)
        self.assertCML(cubes, ('netcdf', 'netcdf_monotonic.cml'))
    def test_load_global_xyt_total(self):
        # Test loading single xyt CF-netCDF file.
        cube = iris.load_cube(
            tests.get_data_path(('NetCDF', 'global', 'xyt',
                                 'SMALL_total_column_co2.nc')))
        self.assertCML(cube, ('netcdf', 'netcdf_global_xyt_total.cml'))
    def test_load_global_xyt_hires(self):
        # Test loading another single xyt CF-netCDF file.
        cube = iris.load_cube(tests.get_data_path(
            ('NetCDF', 'global', 'xyt', 'SMALL_hires_wind_u_for_ipcc4.nc')))
        self.assertCML(cube, ('netcdf', 'netcdf_global_xyt_hires.cml'))
    def test_missing_time_bounds(self):
        # Check we can cope with a missing bounds variable.
        with self.temp_filename(suffix='nc') as filename:
            # Tweak a copy of the test data file to rename (we can't delete)
            # the time bounds variable.
            src = tests.get_data_path(('NetCDF', 'global', 'xyt',
                                       'SMALL_hires_wind_u_for_ipcc4.nc'))
            shutil.copyfile(src, filename)
            dataset = nc.Dataset(filename, mode='a')
            dataset.renameVariable('time_bnds', 'foo')
            dataset.close()
            # The load itself is the assertion: it must not raise despite
            # the dangling 'bounds' reference.
            cube = iris.load_cube(filename, 'eastward_wind')
    def test_load_global_xyzt_gems(self):
        # Test loading single xyzt CF-netCDF file (multi-cube).
        cubes = iris.load(tests.get_data_path(('NetCDF', 'global', 'xyz_t',
                                               'GEMS_CO2_Apr2006.nc')))
        cubes = sorted(cubes, key=lambda cube: cube.name())
        self.assertCML(cubes, ('netcdf', 'netcdf_global_xyzt_gems.cml'))
        # Check the masked array fill value is propagated through the data
        # manager loading.
        lnsp = cubes[1]
        self.assertTrue(ma.isMaskedArray(lnsp.data))
        self.assertEqual(-32767.0, lnsp.data.fill_value)
    def test_load_global_xyzt_gems_iter(self):
        # Test loading stepped single xyzt CF-netCDF file (multi-cube).
        for i, cube in enumerate(sorted(
                iris.load(
                    tests.get_data_path(('NetCDF', 'global', 'xyz_t',
                                         'GEMS_CO2_Apr2006.nc'))),
                key=lambda cube: cube.name())):
            self.assertCML(cube, ('netcdf',
                                  'netcdf_global_xyzt_gems_iter_%d.cml' % i))
    def test_load_rotated_xy_land(self):
        # Test loading single xy rotated pole CF-netCDF file.
        cube = iris.load_cube(tests.get_data_path(
            ('NetCDF', 'rotated', 'xy', 'rotPole_landAreaFraction.nc')))
        # Make sure the AuxCoords have lazy data.
        self.assertTrue(is_lazy_data(cube.coord('latitude').core_points()))
        self.assertCML(cube, ('netcdf', 'netcdf_rotated_xy_land.cml'))
    def test_load_rotated_xyt_precipitation(self):
        # Test loading single xyt rotated pole CF-netCDF file.
        cube = iris.load_cube(
            tests.get_data_path(('NetCDF', 'rotated', 'xyt',
                                 'small_rotPole_precipitation.nc')))
        self.assertCML(cube, ('netcdf',
                              'netcdf_rotated_xyt_precipitation.cml'))
    def test_load_tmerc_grid_and_clim_bounds(self):
        # Test loading a single CF-netCDF file with a transverse Mercator
        # grid_mapping and a time variable with climatology.
        cube = iris.load_cube(
            tests.get_data_path(('NetCDF', 'transverse_mercator',
                                 'tmean_1910_1910.nc')))
        self.assertCML(cube, ('netcdf', 'netcdf_tmerc_and_climatology.cml'))
    def test_load_tmerc_grid_with_projection_origin(self):
        # Test loading a single CF-netCDF file with a transverse Mercator
        # grid_mapping that uses longitude_of_projection_origin and
        # scale_factor_at_projection_origin instead of
        # longitude_of_central_meridian and scale_factor_at_central_meridian.
        cube = iris.load_cube(
            tests.get_data_path(('NetCDF', 'transverse_mercator',
                                 'projection_origin_attributes.nc')))
        expected = icoord_systems.TransverseMercator(
            latitude_of_projection_origin=49.0,
            longitude_of_central_meridian=-2.0,
            false_easting=400000.0,
            false_northing=-100000.0,
            scale_factor_at_central_meridian=0.9996012717,
            ellipsoid=icoord_systems.GeogCS(
                semi_major_axis=6377563.396, semi_minor_axis=6356256.91))
        self.assertEqual(cube.coord('projection_x_coordinate').coord_system,
                         expected)
        self.assertEqual(cube.coord('projection_y_coordinate').coord_system,
                         expected)
    def test_load_lcc_grid(self):
        # Test loading a single CF-netCDF file with Lambert conformal conic
        # grid mapping.
        cube = iris.load_cube(
            tests.get_data_path(('NetCDF', 'lambert_conformal',
                                 'test_lcc.nc')))
        self.assertCML(cube, ('netcdf', 'netcdf_lcc.cml'))
    def test_missing_climatology(self):
        # Check we can cope with a missing climatology variable.
        with self.temp_filename(suffix='nc') as filename:
            # Tweak a copy of the test data file to rename (we can't delete)
            # the climatology variable.
            src = tests.get_data_path(('NetCDF', 'transverse_mercator',
                                       'tmean_1910_1910.nc'))
            shutil.copyfile(src, filename)
            dataset = nc.Dataset(filename, mode='a')
            dataset.renameVariable('climatology_bounds', 'foo')
            dataset.close()
            # Load must succeed despite the dangling climatology reference.
            cube = iris.load_cube(filename, 'Mean temperature')
    def test_load_merc_grid(self):
        # Test loading a single CF-netCDF file with a Mercator grid_mapping
        cube = iris.load_cube(
            tests.get_data_path(('NetCDF', 'mercator',
                                 'toa_brightness_temperature.nc')))
        self.assertCML(cube, ('netcdf', 'netcdf_merc.cml'))
    def test_load_stereographic_grid(self):
        # Test loading a single CF-netCDF file with a stereographic
        # grid_mapping.
        cube = iris.load_cube(
            tests.get_data_path(('NetCDF', 'stereographic',
                                 'toa_brightness_temperature.nc')))
        self.assertCML(cube, ('netcdf', 'netcdf_stereo.cml'))
    def test_cell_methods(self):
        # Test exercising CF-netCDF cell method parsing.
        cubes = iris.load(tests.get_data_path(('NetCDF', 'testing',
                                               'cell_methods.nc')))
        # TEST_COMPAT mod - new cube merge doesn't sort in the same way - test
        # can pass by manual sorting...
        cubes = iris.cube.CubeList(sorted(cubes, key=lambda cube: cube.name()))
        # TEST_COMPAT mod - different versions of the Python module
        # `netCDF4` give different data arrays: MaskedArray vs ndarray
        # Since we're not interested in the data we can just normalise
        # to MaskedArray (to minimise the change).
        for cube in cubes:
            # Force the fill value to be the default netCDF fill value
            # to ensure it matches the previous behaviour.
            cube.data = ma.masked_equal(cube.data, -2147483647)
        self.assertCML(cubes, ('netcdf', 'netcdf_cell_methods.cml'))
    def test_deferred_loading(self):
        # Test exercising CF-netCDF deferred loading and deferred slicing.
        # shape (31, 161, 320)
        cube = iris.load_cube(tests.get_data_path(
            ('NetCDF', 'global', 'xyt', 'SMALL_total_column_co2.nc')))
        # Consecutive index on same dimension.
        self.assertCML(cube[0], ('netcdf', 'netcdf_deferred_index_0.cml'))
        self.assertCML(cube[0][0], ('netcdf', 'netcdf_deferred_index_1.cml'))
        self.assertCML(cube[0][0][0], ('netcdf',
                                       'netcdf_deferred_index_2.cml'))
        # Consecutive slice on same dimension.
        self.assertCML(cube[0:20], ('netcdf', 'netcdf_deferred_slice_0.cml'))
        self.assertCML(cube[0:20][0:10], ('netcdf',
                                          'netcdf_deferred_slice_1.cml'))
        self.assertCML(cube[0:20][0:10][0:5], ('netcdf',
                                               'netcdf_deferred_slice_2.cml'))
        # Consecutive tuple index on same dimension.
        self.assertCML(cube[(0, 8, 4, 2, 14, 12), ],
                       ('netcdf', 'netcdf_deferred_tuple_0.cml'))
        self.assertCML(cube[(0, 8, 4, 2, 14, 12), ][(0, 2, 4, 1), ],
                       ('netcdf', 'netcdf_deferred_tuple_1.cml'))
        subcube = cube[(0, 8, 4, 2, 14, 12), ][(0, 2, 4, 1), ][(1, 3), ]
        self.assertCML(subcube, ('netcdf', 'netcdf_deferred_tuple_2.cml'))
        # Consecutive mixture on same dimension.
        self.assertCML(cube[0:20:2][(9, 5, 8, 0), ][3],
                       ('netcdf', 'netcdf_deferred_mix_0.cml'))
        self.assertCML(cube[(2, 7, 3, 4, 5, 0, 9, 10), ][2:6][3],
                       ('netcdf', 'netcdf_deferred_mix_0.cml'))
        self.assertCML(cube[0][(0, 2), (1, 3)],
                       ('netcdf', 'netcdf_deferred_mix_1.cml'))
    def test_units(self):
        # Test exercising graceful cube and coordinate units loading.
        cube0, cube1 = sorted(iris.load(tests.get_data_path(('NetCDF',
                                                             'testing',
                                                             'units.nc'))),
                              key=lambda cube: cube.var_name)
        self.assertCML(cube0, ('netcdf', 'netcdf_units_0.cml'))
        self.assertCML(cube1, ('netcdf', 'netcdf_units_1.cml'))
class TestNetCDFCRS(tests.IrisTest):
    """Unit tests for building coordinate systems from CF grid mappings."""
    def setUp(self):
        # A bare attribute holder standing in for a CF grid-mapping variable.
        class Var(object):
            pass
        self.grid = Var()
    def test_lat_lon_major_minor(self):
        # Semi-major/minor axes should yield the equivalent ellipsoidal CRS.
        semi_major, semi_minor = 63781370, 63567523
        self.grid.semi_major_axis = semi_major
        self.grid.semi_minor_axis = semi_minor
        result = pyke_rules.build_coordinate_system(self.grid)
        expected = icoord_systems.GeogCS(semi_major, semi_minor)
        self.assertEqual(result, expected)
    def test_lat_lon_earth_radius(self):
        # A single earth radius should yield a spherical CRS.
        radius = 63700000
        self.grid.earth_radius = radius
        result = pyke_rules.build_coordinate_system(self.grid)
        self.assertEqual(result, icoord_systems.GeogCS(radius))
class SaverPermissions(tests.IrisTest):
    """Filesystem failure modes of the netCDF Saver."""
    def test_noexist_directory(self):
        # Saving into a directory that does not exist should raise IOError.
        missing_dir = os.path.join(tempfile.gettempdir(), 'non_existent_dir')
        target = os.path.join(missing_dir, 'tmp.nc')
        with self.assertRaises(IOError):
            with iris.fileformats.netcdf.Saver(target, 'NETCDF4'):
                pass
    def test_bad_permissions(self):
        # A read-only directory should raise IOError and leave no file behind.
        tmp_dir = tempfile.mkdtemp()
        target = os.path.join(tmp_dir, 'tmp.nc')
        try:
            os.chmod(tmp_dir, stat.S_IREAD)
            with self.assertRaises(IOError):
                iris.fileformats.netcdf.Saver(target, 'NETCDF4')
            self.assertFalse(os.path.exists(target))
        finally:
            # Always remove the temporary directory, even on failure.
            os.rmdir(tmp_dir)
@tests.skip_data
class TestSave(tests.IrisTest):
    """Round-trip saves of stock cubes, checked against expected CDL."""
    def test_hybrid(self):
        # A cube carrying a hybrid-height aux factory.
        cube = stock.realistic_4d()
        with self.temp_filename(suffix='.nc') as path:
            iris.save(cube, path, netcdf_format='NETCDF3_CLASSIC')
            self.assertCDL(path, ('netcdf',
                                  'netcdf_save_realistic_4d.cdl'))
    def test_no_hybrid(self):
        # The same stock cube with its aux factory removed.
        cube = stock.realistic_4d()
        cube.remove_aux_factory(cube.aux_factories[0])
        with self.temp_filename(suffix='.nc') as path:
            iris.save(cube, path, netcdf_format='NETCDF3_CLASSIC')
            self.assertCDL(
                path, ('netcdf', 'netcdf_save_realistic_4d_no_hybrid.cdl'))
    def test_scalar_cube(self):
        # A fully scalar (0-d) slice of the stock cube.
        cube = stock.realistic_4d()[0, 0, 0, 0]
        with self.temp_filename(suffix='.nc') as path:
            iris.save(cube, path, netcdf_format='NETCDF3_CLASSIC')
            self.assertCDL(path, ('netcdf',
                                  'netcdf_save_realistic_0d.cdl'))
    def test_no_name_cube(self):
        # A cube and coordinates without any standard/long/var names.
        payload = np.arange(20, dtype=np.float64).reshape((4, 5))
        cube = iris.cube.Cube(payload)
        dim0 = iris.coords.DimCoord(np.arange(4, dtype=np.float64))
        dim1 = iris.coords.DimCoord(np.arange(5, dtype=np.float64), units='m')
        other = iris.coords.AuxCoord('foobar', units='no_unit')
        cube.add_dim_coord(dim0, 0)
        cube.add_dim_coord(dim1, 1)
        cube.add_aux_coord(other)
        with self.temp_filename(suffix='.nc') as path:
            iris.save(cube, path, netcdf_format='NETCDF3_CLASSIC')
            self.assertCDL(path, ('netcdf', 'netcdf_save_no_name.cdl'))
class TestNetCDFSave(tests.IrisTest):
    """Saving tests: cube lists, coordinate/name conflicts and attributes.

    setUp builds a family of small in-memory cubes with deliberately
    overlapping names so that the saver's conflict-resolution paths are
    exercised; results are checked against stored CDL references.
    """
    def setUp(self):
        self.cubell = iris.cube.Cube(np.arange(4).reshape(2, 2),
                                     'air_temperature')
        self.cube = iris.cube.Cube(np.zeros([2, 2]),
                                   standard_name='surface_temperature',
                                   long_name=None,
                                   var_name='temp',
                                   units='K')
        self.cube2 = iris.cube.Cube(np.ones([1, 2, 2]),
                                    standard_name=None,
                                    long_name='Something Random',
                                    var_name='temp2',
                                    units='K')
        self.cube3 = iris.cube.Cube(np.ones([2, 2, 2]),
                                    standard_name=None,
                                    long_name='Something Random',
                                    var_name='temp3',
                                    units='K')
        self.cube4 = iris.cube.Cube(np.zeros([10]),
                                    standard_name='air_temperature',
                                    long_name=None,
                                    var_name='temp',
                                    units='K')
        self.cube5 = iris.cube.Cube(np.ones([20]),
                                    standard_name=None,
                                    long_name='air_temperature',
                                    var_name='temp2',
                                    units='K')
        self.cube6 = iris.cube.Cube(np.ones([10]),
                                    standard_name=None,
                                    long_name='air_temperature',
                                    var_name='temp3',
                                    units='K')
    @tests.skip_data
    def test_netcdf_save_format(self):
        # Read netCDF input file.
        file_in = tests.get_data_path(
            ('NetCDF', 'global', 'xyt', 'SMALL_total_column_co2.nc'))
        cube = iris.load_cube(file_in)
        with self.temp_filename(suffix='.nc') as file_out:
            # Test default NETCDF4 file format saving.
            iris.save(cube, file_out)
            ds = nc.Dataset(file_out)
            self.assertEqual(ds.file_format, 'NETCDF4',
                             'Failed to save as NETCDF4 format')
            ds.close()
            # Test NETCDF4_CLASSIC file format saving.
            iris.save(cube, file_out, netcdf_format='NETCDF4_CLASSIC')
            ds = nc.Dataset(file_out)
            self.assertEqual(ds.file_format, 'NETCDF4_CLASSIC',
                             'Failed to save as NETCDF4_CLASSIC format')
            ds.close()
            # Test NETCDF3_CLASSIC file format saving.
            iris.save(cube, file_out, netcdf_format='NETCDF3_CLASSIC')
            ds = nc.Dataset(file_out)
            self.assertEqual(ds.file_format, 'NETCDF3_CLASSIC',
                             'Failed to save as NETCDF3_CLASSIC format')
            ds.close()
            # Test NETCDF4_64BIT file format saving.
            iris.save(cube, file_out, netcdf_format='NETCDF3_64BIT')
            ds = nc.Dataset(file_out)
            # Newer netCDF4-python reports this format as
            # 'NETCDF3_64BIT_OFFSET', so accept either spelling.
            self.assertTrue(ds.file_format in ['NETCDF3_64BIT',
                                               'NETCDF3_64BIT_OFFSET'],
                            'Failed to save as NETCDF3_64BIT format')
            ds.close()
            # Test invalid file format saving.
            with self.assertRaises(ValueError):
                iris.save(cube, file_out, netcdf_format='WIBBLE')
    @tests.skip_data
    def test_netcdf_save_single(self):
        # Test saving a single CF-netCDF file.
        # Read PP input file.
        file_in = tests.get_data_path(
            ('PP', 'cf_processing',
             '000003000000.03.236.000128.1990.12.01.00.00.b.pp'))
        cube = iris.load_cube(file_in)
        # Write Cube to netCDF file.
        with self.temp_filename(suffix='.nc') as file_out:
            iris.save(cube, file_out)
            # Check the netCDF file against CDL expected output.
            self.assertCDL(file_out, ('netcdf', 'netcdf_save_single.cdl'))
    # TODO investigate why merge now make time an AuxCoord rather than a
    # DimCoord and why forecast_period is 'preferred'.
    @tests.skip_data
    def test_netcdf_save_multi2multi(self):
        # Test saving multiple CF-netCDF files.
        # Read PP input file.
        file_in = tests.get_data_path(('PP', 'cf_processing',
                                       'abcza_pa19591997_daily_29.b.pp'))
        cubes = iris.load(file_in)
        # Save multiple cubes to multiple files.
        for index, cube in enumerate(cubes):
            # Write Cube to netCDF file.
            with self.temp_filename(suffix='.nc') as file_out:
                iris.save(cube, file_out)
                # Check the netCDF file against CDL expected output.
                self.assertCDL(file_out, ('netcdf',
                                          'netcdf_save_multi_%d.cdl' % index))
    @tests.skip_data
    def test_netcdf_save_multi2single(self):
        # Test saving multiple cubes to a single CF-netCDF file.
        # Read PP input file.
        file_in = tests.get_data_path(('PP', 'cf_processing',
                                       'abcza_pa19591997_daily_29.b.pp'))
        cubes = iris.load(file_in)
        # Write Cube to netCDF file.
        with self.temp_filename(suffix='.nc') as file_out:
            # Check that it is the same on loading
            iris.save(cubes, file_out)
            # Check the netCDF file against CDL expected output.
            self.assertCDL(file_out, ('netcdf', 'netcdf_save_multiple.cdl'))
    def test_netcdf_multi_nocoord(self):
        # Testing the saving of a cubelist with no coords.
        cubes = iris.cube.CubeList([self.cube, self.cube2, self.cube3])
        with self.temp_filename(suffix='.nc') as file_out:
            iris.save(cubes, file_out)
            # Check the netCDF file against CDL expected output.
            self.assertCDL(file_out, ('netcdf', 'netcdf_save_nocoord.cdl'))
    def test_netcdf_multi_samevarnme(self):
        # Testing the saving of a cubelist with cubes of the same var_name.
        self.cube2.var_name = self.cube.var_name
        cubes = iris.cube.CubeList([self.cube, self.cube2])
        with self.temp_filename(suffix='.nc') as file_out:
            iris.save(cubes, file_out)
            # Check the netCDF file against CDL expected output.
            self.assertCDL(file_out, ('netcdf', 'netcdf_save_samevar.cdl'))
    def test_netcdf_multi_with_coords(self):
        # Testing the saving of a cubelist with coordinates.
        lat = iris.coords.DimCoord(np.arange(2),
                                   long_name=None, var_name='lat',
                                   units='degree_north')
        lon = iris.coords.DimCoord(np.arange(2), standard_name='longitude',
                                   long_name=None, var_name='lon',
                                   units='degree_east')
        rcoord = iris.coords.DimCoord(np.arange(1), standard_name=None,
                                      long_name='Rnd Coordinate',
                                      units=None)
        self.cube.add_dim_coord(lon, 0)
        self.cube.add_dim_coord(lat, 1)
        self.cube2.add_dim_coord(lon, 1)
        self.cube2.add_dim_coord(lat, 2)
        self.cube2.add_dim_coord(rcoord, 0)
        cubes = iris.cube.CubeList([self.cube, self.cube2])
        with self.temp_filename(suffix='.nc') as file_out:
            iris.save(cubes, file_out)
            # Check the netCDF file against CDL expected output.
            self.assertCDL(file_out, ('netcdf', 'netcdf_save_wcoord.cdl'))
    # NOTE(review): 'wtih' typo in the method name; renaming would change
    # the reported test id, so it is left as-is here.
    def test_netcdf_multi_wtih_samedimcoord(self):
        time1 = iris.coords.DimCoord(np.arange(10),
                                     standard_name='time',
                                     var_name='time')
        time2 = iris.coords.DimCoord(np.arange(20),
                                     standard_name='time',
                                     var_name='time')
        self.cube4.add_dim_coord(time1, 0)
        self.cube5.add_dim_coord(time2, 0)
        self.cube6.add_dim_coord(time1, 0)
        cubes = iris.cube.CubeList([self.cube4, self.cube5, self.cube6])
        with self.temp_filename(suffix='.nc') as file_out:
            iris.save(cubes, file_out)
            # Check the netCDF file against CDL expected output.
            self.assertCDL(file_out, ('netcdf',
                                      'netcdf_save_samedimcoord.cdl'))
    def test_netcdf_multi_conflict_name_dup_coord(self):
        # Duplicate coordinates with modified variable names lookup.
        latitude1 = iris.coords.DimCoord(np.arange(10),
                                         standard_name='latitude')
        time2 = iris.coords.DimCoord(np.arange(2),
                                     standard_name='time')
        latitude2 = iris.coords.DimCoord(np.arange(2),
                                         standard_name='latitude')
        self.cube6.add_dim_coord(latitude1, 0)
        self.cube.add_dim_coord(latitude2[:], 1)
        self.cube.add_dim_coord(time2[:], 0)
        cubes = iris.cube.CubeList([self.cube, self.cube6, self.cube6.copy()])
        with self.temp_filename(suffix='.nc') as file_out:
            iris.save(cubes, file_out)
            # Check the netCDF file against CDL expected output.
            self.assertCDL(
                file_out, ('netcdf', 'multi_dim_coord_slightly_different.cdl'))
    @tests.skip_data
    def test_netcdf_hybrid_height(self):
        # Test saving a CF-netCDF file which contains a hybrid height
        # (i.e. dimensionless vertical) coordinate.
        # Read PP input file.
        names = ['air_potential_temperature', 'surface_altitude']
        file_in = tests.get_data_path(
            ('PP', 'COLPEX', 'small_colpex_theta_p_alt.pp'))
        cube = iris.load_cube(file_in, names[0])
        # Write Cube to netCDF file.
        with self.temp_filename(suffix='.nc') as file_out:
            iris.save(cube, file_out)
            # Check the netCDF file against CDL expected output.
            self.assertCDL(file_out,
                           ('netcdf', 'netcdf_save_hybrid_height.cdl'))
            # Read netCDF file.
            cubes = iris.load(file_out)
            cubes_names = [c.name() for c in cubes]
            self.assertEqual(cubes_names, names)
            # Check the PP read, netCDF write, netCDF read mechanism.
            self.assertCML(cubes.extract(names[0])[0],
                           ('netcdf', 'netcdf_save_load_hybrid_height.cml'))
    @tests.skip_data
    def test_netcdf_save_ndim_auxiliary(self):
        # Test saving CF-netCDF with multi-dimensional auxiliary coordinates.
        # Read netCDF input file.
        file_in = tests.get_data_path(
            ('NetCDF', 'rotated', 'xyt', 'small_rotPole_precipitation.nc'))
        cube = iris.load_cube(file_in)
        # Write Cube to netCDF file.
        with self.temp_filename(suffix='.nc') as file_out:
            iris.save(cube, file_out)
            # Check the netCDF file against CDL expected output.
            self.assertCDL(file_out, ('netcdf',
                                      'netcdf_save_ndim_auxiliary.cdl'))
            # Read the netCDF file.
            cube = iris.load_cube(file_out)
            # Check the netCDF read, write, read mechanism.
            self.assertCML(cube, ('netcdf',
                                  'netcdf_save_load_ndim_auxiliary.cml'))
    def test_netcdf_save_conflicting_aux(self):
        # Test saving CF-netCDF with multi-dimensional auxiliary coordinates,
        # with conflicts.
        self.cube4.add_aux_coord(iris.coords.AuxCoord(np.arange(10),
                                                      'time'), 0)
        self.cube6.add_aux_coord(iris.coords.AuxCoord(np.arange(10, 20),
                                                      'time'), 0)
        cubes = iris.cube.CubeList([self.cube4, self.cube6])
        with self.temp_filename(suffix='.nc') as file_out:
            iris.save(cubes, file_out)
            # Check the netCDF file against CDL expected output.
            self.assertCDL(file_out, ('netcdf', 'netcdf_save_conf_aux.cdl'))
    def test_netcdf_save_gridmapping(self):
        # Test saving CF-netCDF from a cubelist with various grid mappings.
        c1 = self.cubell
        c2 = self.cubell.copy()
        c3 = self.cubell.copy()
        coord_system = icoord_systems.GeogCS(6371229)
        coord_system2 = icoord_systems.GeogCS(6371228)
        coord_system3 = icoord_systems.RotatedGeogCS(30, 30)
        c1.add_dim_coord(iris.coords.DimCoord(
            np.arange(1, 3), 'latitude', long_name='1', units='degrees',
            coord_system=coord_system), 1)
        c1.add_dim_coord(iris.coords.DimCoord(
            np.arange(1, 3), 'longitude', long_name='1', units='degrees',
            coord_system=coord_system), 0)
        c2.add_dim_coord(iris.coords.DimCoord(
            np.arange(1, 3), 'latitude', long_name='2', units='degrees',
            coord_system=coord_system2), 1)
        c2.add_dim_coord(iris.coords.DimCoord(
            np.arange(1, 3), 'longitude', long_name='2', units='degrees',
            coord_system=coord_system2), 0)
        c3.add_dim_coord(iris.coords.DimCoord(
            np.arange(1, 3), 'grid_latitude', long_name='3', units='degrees',
            coord_system=coord_system3), 1)
        c3.add_dim_coord(iris.coords.DimCoord(
            np.arange(1, 3), 'grid_longitude', long_name='3', units='degrees',
            coord_system=coord_system3), 0)
        cubes = iris.cube.CubeList([c1, c2, c3])
        with self.temp_filename(suffix='.nc') as file_out:
            iris.save(cubes, file_out)
            # Check the netCDF file against CDL expected output.
            self.assertCDL(file_out, ('netcdf',
                                      'netcdf_save_gridmapmulti.cdl'))
    def test_netcdf_save_conflicting_names(self):
        # Test saving CF-netCDF with a dimension name corresponding to
        # an existing variable name (conflict).
        self.cube4.add_dim_coord(iris.coords.DimCoord(np.arange(10),
                                                      'time'), 0)
        self.cube6.add_aux_coord(iris.coords.AuxCoord(1, 'time'), None)
        cubes = iris.cube.CubeList([self.cube4, self.cube6])
        with self.temp_filename(suffix='.nc') as file_out:
            iris.save(cubes, file_out)
            # Check the netCDF file against CDL expected output.
            self.assertCDL(file_out, ('netcdf', 'netcdf_save_conf_name.cdl'))
    @tests.skip_data
    def test_trajectory(self):
        file_in = tests.get_data_path(('PP', 'aPPglob1', 'global.pp'))
        cube = iris.load_cube(file_in)
        # extract a trajectory
        xpoint = cube.coord('longitude').points[:10]
        ypoint = cube.coord('latitude').points[:10]
        # NOTE(review): xpoint (longitude values) feeds 'latitude' and vice
        # versa here — presumably deliberate sample positions, but worth
        # confirming against the reference CML.
        sample_points = [('latitude', xpoint), ('longitude', ypoint)]
        traj = iris.analysis.trajectory.interpolate(cube, sample_points)
        # save, reload and check
        with self.temp_filename(suffix='.nc') as temp_filename:
            iris.save(traj, temp_filename)
            reloaded = iris.load_cube(temp_filename)
            self.assertCML(reloaded,
                           ('netcdf', 'save_load_traj.cml'),
                           checksum=False)
            self.assertArrayEqual(traj.data, reloaded.data)
    def test_attributes(self):
        # Should be global attributes.
        aglobals = {'history': 'A long time ago...',
                    'title': 'Attribute test',
                    'foo': 'bar'}
        for k, v in six.iteritems(aglobals):
            self.cube.attributes[k] = v
        # Should be overridden.
        aover = {'Conventions': 'TEST'}
        for k, v in six.iteritems(aover):
            self.cube.attributes[k] = v
        # Should be data variable attributes.
        avars = {'standard_error_multiplier': 23,
                 'flag_masks': 'a',
                 'flag_meanings': 'b',
                 'flag_values': 'c',
                 'STASH': iris.fileformats.pp.STASH(1, 2, 3)}
        for k, v in six.iteritems(avars):
            self.cube.attributes[k] = v
        with self.temp_filename(suffix='.nc') as filename:
            iris.save(self.cube, filename)
            # Load the dataset.
            ds = nc.Dataset(filename, 'r')
            exceptions = []
            # Should be global attributes.
            for gkey in aglobals:
                if getattr(ds, gkey) != aglobals.get(gkey):
                    exceptions.append('{} != {}'.format(getattr(ds, gkey),
                                                        aglobals.get(gkey)))
            # Should be overridden.
            for okey in aover:
                if getattr(ds, okey) == aover.get(okey):
                    exceptions.append('{} != {}'.format(getattr(ds, okey),
                                                        avars.get(okey)))
            dv = ds['temp']
            # Should be data variable attributes;
            # except STASH -> um_stash_source.
            for vkey in avars:
                if vkey != 'STASH' and (getattr(dv, vkey) != avars.get(vkey)):
                    exceptions.append('{} != {}'.format(getattr(dv, vkey),
                                                        avars.get(vkey)))
            if getattr(dv, 'um_stash_source') != avars.get('STASH'):
                # NOTE(review): avars.get(vkey) reuses the leftover loop
                # variable — looks like it should be avars.get('STASH');
                # only affects the failure message text.
                exc = '{} != {}'.format(getattr(dv, 'um_stash_source'),
                                        avars.get(vkey))
                exceptions.append(exc)
        self.assertEqual(exceptions, [])
    def test_conflicting_attributes(self):
        # Should be data variable attributes.
        self.cube.attributes['foo'] = 'bar'
        self.cube2.attributes['foo'] = 'orange'
        with self.temp_filename(suffix='.nc') as filename:
            iris.save([self.cube, self.cube2], filename)
            self.assertCDL(filename, ('netcdf', 'netcdf_save_confl_attr.cdl'))
    def test_conflicting_global_attributes(self):
        # Should be data variable attributes, but raise a warning.
        attr_name = 'history'
        self.cube.attributes[attr_name] = 'Team A won.'
        self.cube2.attributes[attr_name] = 'Team B won.'
        expected_msg = '{attr_name!r} is being added as CF data variable ' \
                       'attribute, but {attr_name!r} should only be a CF ' \
                       'global attribute.'.format(attr_name=attr_name)
        with self.temp_filename(suffix='.nc') as filename:
            with mock.patch('warnings.warn') as warn:
                iris.save([self.cube, self.cube2], filename)
                warn.assert_called_with(expected_msg)
                self.assertCDL(filename,
                               ('netcdf', 'netcdf_save_confl_global_attr.cdl'))
    def test_no_global_attributes(self):
        # Should all be data variable attributes.
        # Different keys.
        self.cube.attributes['a'] = 'a'
        self.cube2.attributes['b'] = 'a'
        self.cube3.attributes['c'] = 'a'
        self.cube4.attributes['d'] = 'a'
        self.cube5.attributes['e'] = 'a'
        self.cube6.attributes['f'] = 'a'
        # Different values.
        self.cube.attributes['g'] = 'p'
        self.cube2.attributes['g'] = 'q'
        self.cube3.attributes['g'] = 'r'
        self.cube4.attributes['g'] = 's'
        self.cube5.attributes['g'] = 't'
        self.cube6.attributes['g'] = 'u'
        # One different value.
        self.cube.attributes['h'] = 'v'
        self.cube2.attributes['h'] = 'v'
        self.cube3.attributes['h'] = 'v'
        self.cube4.attributes['h'] = 'w'
        self.cube5.attributes['h'] = 'v'
        self.cube6.attributes['h'] = 'v'
        cubes = [self.cube, self.cube2, self.cube3,
                 self.cube4, self.cube5, self.cube6]
        with self.temp_filename(suffix='.nc') as filename:
            iris.save(cubes, filename)
            self.assertCDL(filename, ('netcdf',
                                      'netcdf_save_no_global_attr.cdl'))
class TestNetCDF3SaveInteger(tests.IrisTest):
    """Integer down-casting behaviour when saving to NETCDF3_CLASSIC.

    NetCDF3 has no 64-bit or unsigned integer types: values that fit in
    int32 should be cast and round-trip, while values that cannot be
    safely cast must raise ValueError.
    """
    def setUp(self):
        self.cube = iris.cube.Cube(np.zeros((2, 2), dtype=np.float64),
                                   standard_name='surface_temperature',
                                   long_name=None,
                                   var_name='temp',
                                   units='K')
    def test_int64_dimension_coord_netcdf3(self):
        # int64 points that fit in int32: save must succeed.
        coord = iris.coords.DimCoord(np.array([1, 2], dtype=np.int64),
                                     long_name='x')
        self.cube.add_dim_coord(coord, 0)
        with self.temp_filename(suffix='.nc') as filename:
            iris.save(self.cube, filename, netcdf_format='NETCDF3_CLASSIC')
            reloaded = iris.load_cube(filename)
            self.assertCML(reloaded, ('netcdf',
                                      'int64_dimension_coord_netcdf3.cml'),
                           checksum=False)
    def test_int64_auxiliary_coord_netcdf3(self):
        coord = iris.coords.AuxCoord(np.array([1, 2], dtype=np.int64),
                                     long_name='x')
        self.cube.add_aux_coord(coord, 0)
        with self.temp_filename(suffix='.nc') as filename:
            iris.save(self.cube, filename, netcdf_format='NETCDF3_CLASSIC')
            reloaded = iris.load_cube(filename)
            self.assertCML(reloaded, ('netcdf',
                                      'int64_auxiliary_coord_netcdf3.cml'),
                           checksum=False)
    def test_int64_data_netcdf3(self):
        self.cube.data = self.cube.data.astype(np.int64)
        with self.temp_filename(suffix='.nc') as filename:
            iris.save(self.cube, filename, netcdf_format='NETCDF3_CLASSIC')
            reloaded = iris.load_cube(filename)
            self.assertCML(reloaded, ('netcdf',
                                      'int64_data_netcdf3.cml'))
    def test_uint32_dimension_coord_netcdf3(self):
        # uint32 points that fit in int32: save must succeed.
        coord = iris.coords.DimCoord(np.array([1, 2], dtype=np.uint32),
                                     long_name='x')
        self.cube.add_dim_coord(coord, 0)
        with self.temp_filename(suffix='.nc') as filename:
            iris.save(self.cube, filename, netcdf_format='NETCDF3_CLASSIC')
            reloaded = iris.load_cube(filename)
            self.assertCML(reloaded, ('netcdf',
                                      'uint32_dimension_coord_netcdf3.cml'),
                           checksum=False)
    def test_uint32_auxiliary_coord_netcdf3(self):
        coord = iris.coords.AuxCoord(np.array([1, 2], dtype=np.uint32),
                                     long_name='x')
        self.cube.add_aux_coord(coord, 0)
        with self.temp_filename(suffix='.nc') as filename:
            iris.save(self.cube, filename, netcdf_format='NETCDF3_CLASSIC')
            reloaded = iris.load_cube(filename)
            self.assertCML(reloaded, ('netcdf',
                                      'uint32_auxiliary_coord_netcdf3.cml'),
                           checksum=False)
    def test_uint32_data_netcdf3(self):
        self.cube.data = self.cube.data.astype(np.uint32)
        with self.temp_filename(suffix='.nc') as filename:
            iris.save(self.cube, filename, netcdf_format='NETCDF3_CLASSIC')
            reloaded = iris.load_cube(filename)
            self.assertCML(reloaded, ('netcdf',
                                      'uint32_data_netcdf3.cml'))
    def test_uint64_dimension_coord_netcdf3(self):
        # Points that cannot be safely cast to int32.
        coord = iris.coords.DimCoord(np.array([0, 18446744073709551615],
                                              dtype=np.uint64),
                                     long_name='x')
        self.cube.add_dim_coord(coord, 0)
        with self.temp_filename(suffix='.nc') as filename:
            with self.assertRaises(ValueError):
                iris.save(self.cube, filename, netcdf_format='NETCDF3_CLASSIC')
    def test_uint64_auxiliary_coord_netcdf3(self):
        # Points that cannot be safely cast to int32.
        coord = iris.coords.AuxCoord(np.array([0, 18446744073709551615],
                                              dtype=np.uint64),
                                     long_name='x')
        self.cube.add_aux_coord(coord, 0)
        with self.temp_filename(suffix='.nc') as filename:
            with self.assertRaises(ValueError):
                iris.save(self.cube, filename, netcdf_format='NETCDF3_CLASSIC')
    def test_uint64_data_netcdf3(self):
        # Data that cannot be safely cast to int32.
        self.cube.data = self.cube.data.astype(np.uint64)
        self.cube.data[0, 1] = 18446744073709551615
        with self.temp_filename(suffix='.nc') as filename:
            with self.assertRaises(ValueError):
                iris.save(self.cube, filename, netcdf_format='NETCDF3_CLASSIC')
class TestCFStandardName(tests.IrisTest):
    """Checks on the CF standard-name lookup table.

    Improvements: dropped the empty no-op ``setUp`` (dead code) and switched
    ``assertTrue(x in y)`` to ``assertIn``/``assertNotIn`` for clearer
    failure messages. Test behaviour is unchanged.
    """
    def test_std_name_lookup_pass(self):
        # Test performing a CF standard name look-up hit.
        self.assertIn('time', iris.std_names.STD_NAMES)
    def test_std_name_lookup_fail(self):
        # Test performing a CF standard name look-up miss.
        self.assertNotIn('phenomenon_time', iris.std_names.STD_NAMES)
@tests.skip_data
class TestNetCDFUKmoProcessFlags(tests.IrisTest):
    """Round-trip of the 'ukmo__process_flags' attribute through netCDF."""
    def test_process_flags(self):
        # Test single process flags
        for _, process_desc in iris.fileformats.pp.LBPROC_PAIRS[1:]:
            # Get basic cube and set process flag manually
            ll_cube = stock.lat_lon_cube()
            ll_cube.attributes["ukmo__process_flags"] = (process_desc,)
            # Save cube to netCDF
            with self.temp_filename(suffix='.nc') as temp_filename:
                iris.save(ll_cube, temp_filename)
                # Reload cube
                cube = iris.load_cube(temp_filename)
                # Check correct number and type of flags
                self.assertTrue(
                    len(cube.attributes["ukmo__process_flags"]) == 1,
                    "Mismatch in number of process flags.")
                process_flag = cube.attributes["ukmo__process_flags"][0]
                self.assertEqual(process_flag, process_desc)
        # Test multiple process flags
        multiple_bit_values = ((128, 64), (4096, 1024), (8192, 1024))
        # Maps lbproc value to the process flags that should be created
        multiple_map = {bits: [iris.fileformats.pp.lbproc_map[bit] for
                               bit in bits] for bits in multiple_bit_values}
        for bits, descriptions in six.iteritems(multiple_map):
            ll_cube = stock.lat_lon_cube()
            ll_cube.attributes["ukmo__process_flags"] = descriptions
            # Save cube to netCDF
            with self.temp_filename(suffix='.nc') as temp_filename:
                iris.save(ll_cube, temp_filename)
                # Reload cube
                cube = iris.load_cube(temp_filename)
                # Check correct number and type of flags
                process_flags = cube.attributes["ukmo__process_flags"]
                self.assertTrue(len(process_flags) == len(bits), 'Mismatch in '
                                'number of process flags.')
                self.assertEqual(set(process_flags), set(descriptions))
if __name__ == "__main__":
    # Run this module's test suite when executed as a script.
    tests.main()
|
""" turretFire.py
Use vector projection
to add a shell to the turret"""
import pygame, math
# Initialise all pygame modules before the font/image calls below.
pygame.init()
class Label(pygame.sprite.Sprite):
    """A minimal text sprite.

    Attributes:
        font: pygame font used to render the text.
        text: string to display.
        center: (x, y) position for the centre of the label.
    """
    def __init__(self):
        pygame.sprite.Sprite.__init__(self)
        self.font = pygame.font.SysFont("None", 30)
        self.text = ""
        self.center = (320, 240)
    def update(self):
        # Re-render every frame so text changes show up immediately.
        rendered = self.font.render(self.text, True, (0, 0, 0))
        self.image = rendered
        self.rect = rendered.get_rect()
        self.rect.center = self.center
class Turret(pygame.sprite.Sprite):
    """Player-controlled turret: arrow keys aim and charge, SPACE fires the shell."""
    def __init__(self, shell):
        pygame.sprite.Sprite.__init__(self)
        # keep a handle on the shared shell so SPACE can launch it
        self.shell = shell
        self.imageMaster = pygame.image.load("turret.gif")
        self.imageMaster = self.imageMaster.convert()
        self.rect = self.imageMaster.get_rect()
        self.rect.center = (10, 480)
        self.TURNRATE = 10
        self.dir = 45
        self.charge = 5
    def update(self):
        self.checkKeys()
        self.rotate()
    def checkKeys(self):
        pressed = pygame.key.get_pressed()
        if pressed[pygame.K_LEFT]:
            # turn counter-clockwise, wrapping past 360 back to TURNRATE
            self.dir += self.TURNRATE
            if self.dir > 360:
                self.dir = self.TURNRATE
        if pressed[pygame.K_RIGHT]:
            # turn clockwise, wrapping below 0 to 360 - TURNRATE
            self.dir -= self.TURNRATE
            if self.dir < 0:
                self.dir = 360 - self.TURNRATE
        if pressed[pygame.K_UP]:
            # charge is clamped to the range [0, 20]
            self.charge = min(self.charge + 1, 20)
        if pressed[pygame.K_DOWN]:
            self.charge = max(self.charge - 1, 0)
        if pressed[pygame.K_SPACE]:
            # launch: position the shell at the turret and hand it our state
            self.shell.x = self.rect.centerx
            self.shell.y = self.rect.centery
            self.shell.speed = self.charge
            self.shell.dir = self.dir
            self.shell.calcVector()
    def rotate(self):
        # rotate a pristine master image each frame to avoid cumulative blur
        pivot = self.rect.center
        self.image = pygame.transform.rotate(self.imageMaster, self.dir)
        self.rect = self.image.get_rect()
        self.rect.center = pivot
class Shell(pygame.sprite.Sprite):
    """Ballistic projectile drawn as a small circle; traces its path on the background."""
    def __init__(self, screen, background):
        pygame.sprite.Sprite.__init__(self)
        self.screen = screen
        self.background = background
        canvas = pygame.Surface((10, 10))
        canvas.fill((0xff, 0xff, 0xff))
        canvas.set_colorkey((0xff, 0xff, 0xff))
        pygame.draw.circle(canvas, (0, 0, 0), (5, 5), 5)
        self.image = pygame.transform.scale(canvas, (5, 5))
        self.rect = self.image.get_rect()
        self.rect.center = (-100, -100)
        # start parked off-screen and motionless
        self.x = -100
        self.y = -100
        self.dx = 0
        self.dy = 0
        self.speed = 0
        self.dir = 0
        self.gravity = .5
    def update(self):
        self.calcPos()
        self.checkBounds()
        self.rect.center = (self.x, self.y)
    def calcVector(self):
        """Project speed along the firing angle into (dx, dy) components."""
        radians = self.dir * math.pi / 180
        self.dx = self.speed * math.cos(radians)
        # negate dy because screen y grows downward
        self.dy = -(self.speed * math.sin(radians))
        # wipe any previous trace off the background
        self.background.fill((0x00, 0xCC, 0x00))
    def calcPos(self):
        # gravity accelerates the shell downward each frame
        self.dy += self.gravity
        previous = (self.x, self.y)
        self.x += self.dx
        self.y += self.dy
        # leave a trail segment on the background
        pygame.draw.line(self.background, (0,0,0), previous, (self.x, self.y))
    def checkBounds(self):
        # reset as soon as the shell leaves the visible screen area
        width = self.screen.get_width()
        height = self.screen.get_height()
        if not (0 <= self.x <= width) or not (0 <= self.y <= height):
            self.reset()
    def reset(self):
        """Park the shell off-screen and stop it."""
        self.x = -100
        self.y = -100
        self.speed = 0
def main():
    """Set up the pygame scene and run the fixed-rate (30 fps) game loop."""
    screen = pygame.display.set_mode((640, 480))
    pygame.display.set_caption ("Firing a Shell")
    # green background doubles as the drawing surface for the shell's trail
    background = pygame.Surface(screen.get_size())
    background.fill((0x00, 0xCC, 0x00))
    screen.blit(background, (0, 0))
    # the turret holds a reference to the shell so it can fire it
    shell = Shell(screen, background)
    turret = Turret(shell)
    lblOutput = Label()
    lblOutput.center = (100, 20)
    allSprites = pygame.sprite.Group(shell, turret, lblOutput)
    clock = pygame.time.Clock()
    keepGoing = True
    while keepGoing:
        clock.tick(30)
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                keepGoing = False
        #update label
        lblOutput.text = "dir: %d speed %d" % (turret.dir, turret.charge)
        #blit the background for drawings
        screen.blit(background, (0, 0))
        #allSprites.clear(screen, background)
        allSprites.update()
        allSprites.draw(screen)
        pygame.display.flip()
if __name__ == "__main__":
    main()
|
import os
import subprocess
def get_file(formato):
'''
Función para petición y validación
de un archivo en un formato específico
'''
get_file = False
while not get_file:
# Petición de archivo .p12
archivo = raw_input("Introduzca el archivo .%s: " % (formato))
# Comprobando si el archivo existe
if os.access(archivo, os.R_OK) == False:
print "No se consiguió el archivo %s." % (archivo)
# Comprobando que el archivo sea .p12
elif archivo.split('.')[1] != formato:
print "El archivo no es .%s, es .%s " % (formato, archivo.split('.')[1])
else:
get_file = True
return archivo
get_file = False
def __main__():
print "*************************************"
print "Iniciando script para crackear pkcs12"
print "*************************************"
archivo = ''
contador = 0
encontrado = False
# Se pide la introducción del archivo .p12
archivo_p12 = get_file('p12')
# Se pide la introducción del archivo .pem
archivo_pem = get_file('pem')
# Se pide la introducción del archivo para el diccionario
print "\nSe necesita un diccionario donde estén las posibles contraseñas"
diccionario = get_file('txt')
diccionario = open(diccionario)
linea = diccionario.readline()
while linea != '':
contador += 1
resultado = subprocess.Popen("openssl pkcs12 -in %s -clcerts -nokeys -out %s -password pass:%s" % (archivo_p12, archivo_pem, linea), shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
print "Contraseñas probadas: %d" % (contador)
if str(resultado.stdout.readline()).__contains__("OK"):
clave = linea
encontrado = True
break
linea = diccionario.readline()
if encontrado:
print "La clave es %s" % (str(linea))
else:
print "No se encontró la clave en %s" % (diccionario.name)
__main__()
|
"""
ai.py - Artificial Intelligence Module
Copyright 2009-2011, Michael Yanovich, yanovich.net
Licensed under the Eiffel Forum License 2.
http://sopel.chat
"""
from cleverwrap import CleverWrap
from sopel.module import rule, priority, rate
import random
import time
import os
import html.parser
import cleverbot
cleverbot=CleverWrap("830d8fe7b7ad941487e46f593af70370","tuxbot")
# if os.path.isfile("bot_brain.brn"):
# kernel.bootstrap(brainFile = "bot_brain.brn")
#else:
# kernel.bootstrap(learnFiles = "std-startup.xml", commands = "load aiml b")
# kernel.saveBrain("bot_brain.brn")
# Set value to 3 if not configured
@rule('tuxbot[\::]\s(.*)')
def goodbye(bot, trigger):
    """Relay a message addressed to tuxbot to Cleverbot and reply with its answer.

    :param bot: the sopel bot instance
    :param trigger: the matched message; group(1) is the text after 'tuxbot:'
    """
    reply1 = cleverbot.say(trigger.group(1))
    if "|" in reply1:
        # Cleverbot escapes unicode as "|xxxx"; rewrite that to "\uxxxx"
        # text form. Work on the unescaped str directly -- the original
        # encoded to bytes first, which made the str-argument .replace()
        # calls raise TypeError on Python 3.
        reply1 = html.unescape(reply1).replace("|", "\\u").replace(".", "").lower()
    bot.reply(reply1)
|
from django.contrib import admin
from candidatos.models import Candidato
class CandidatoAdmin(admin.ModelAdmin):
    """Admin configuration for Candidato: grouped edit form, list view, filters and search."""
    fieldsets = [
        (None, {'fields': ['nombre', 'apellidos', 'imagen', 'email']}),
        ('Datos personales', {'fields': ['nif', 'telefono']}),
        ('Candidatura', {'fields': ['secretario', 'consejo', 'biografia', 'motivacion', 'youtube', 'activo']}),
    ]
    list_display = ('nombre', 'apellidos', 'email', 'secretario', 'consejo', 'activo')
    list_filter = ['secretario', 'consejo']
    # Fix: the option is spelled "search_fields"; the previous
    # "seach_fields" typo was silently ignored by Django, so the admin
    # search box never appeared.
    search_fields = ['nombre', 'apellidos', 'email']
admin.site.register(Candidato, CandidatoAdmin)
|
from collections import OrderedDict as OD
from math import ceil
from util.columns import *
from .callbacks import spi_efc_cmd_cb
from .ADF4350 import columns
from ..regs import RegsData, regs_cb
# Register reset values, "<register>|<hex value>|" per line
# (consumed by RegsData.add_hex_data in get_regs below).
hex_data = '''
R0|FEF|
'''
# Bit-field layout, "<register>|<start bit>|<field name>" per line
# (consumed by RegsData.add_bin_data in get_regs below).
bin_data = '''
R0|0|VGA1 Setpoint
R0|3|VGA2 Setpoint
R0|6|VGA2 Switch
R0|7|VGA1 Maximum Gain
R0|9|VGA2 Maximum Gain
'''
def columns():
    # Device-table columns for this device: IP address and SPI.
    # NOTE(review): this definition shadows the ``columns`` imported from
    # .ADF4350 above -- confirm that import is intentional.
    return get_columns([c_ip_addr, c_spi])
def get_menu(dev):
    # Context-menu entries for the device: only the register editor.
    return OD([('Registers', regs_cb)])
# Choice labels for the combo boxes built in get_regs() below; the list
# index of the selected label is what gets written into the R0 bit fields.
vga_setpoints=['+62.5/−24', '+88/−21', '+125/−18', '+176/−15', '+250/−12', '+353/−9', '+500/−6', '+707/−3']
vga2_input=['IP2A, IM2A', 'IP2B, IM2B']
vga1_gain=['9.5', '12', '14', '15.6']
vga2_gain=['14', '16.9', '19', '20.9']
def get_regs(dev):
    """Build the register/settings description for this device.

    Each combo widget maps its selection onto a bit range of register R0
    via ``list_src`` (start bit, end bit, choice list).
    sz=11 presumably is the register payload width in bits -- confirm
    against the RegsData implementation.
    """
    data = RegsData(sz=11)
    data.add_page('calc0')
    data.add('vga1_setpoint', wdgt='combo', state='readonly', label='VGA1 RMS Output, mV rms/dBV', value=vga_setpoints, src=lambda d,v: d.list_src('R0', 0, 2, vga_setpoints, v))
    data.add_page('calc1')
    data.add('vga2_setpoint', wdgt='combo', state='readonly', label='VGA2 RMS Output, mV rms/dBV', value=vga_setpoints, src=lambda d,v: d.list_src('R0', 3, 5, vga_setpoints, v))
    data.add_page('calc3')
    data.add('vga2_input', wdgt='combo', state='readonly', label='VGA2 Input', value=vga2_input, src=lambda d,v: d.list_src('R0', 6, 6, vga2_input, v))
    data.add_page('calc4')
    data.add('vga1_gain', wdgt='combo', state='readonly', label='VGA1 Maximum Gain, dB', value=vga1_gain, src=lambda d,v: d.list_src('R0', 7, 8, vga1_gain, v))
    data.add_page('calc5')
    data.add('vga2_gain', wdgt='combo', state='readonly', label='VGA2 Maximum Gain, dB', value=vga2_gain, src=lambda d,v: d.list_src('R0', 9, 10, vga2_gain, v))
    # attach the raw register values and bit-field names defined above
    data.add_hex_data(hex_data, cmd_cb=spi_efc_cmd_cb)
    data.add_bin_data(bin_data)
    return data
|
'''Trajectory writers'''
from __future__ import division
from yaff.sampling.iterative import Hook, AttributeStateItem, PosStateItem, CellStateItem, ConsErrStateItem
from yaff.sampling.nvt import NHCThermostat, NHCAttributeStateItem
from yaff.sampling.npt import MTKBarostat, MTKAttributeStateItem, TBCombination
from yaff.sampling.mc import MC
__all__ = ['HDF5Writer', 'MCHDF5Writer', 'XYZWriter', 'RestartWriter']
class BaseHDF5Writer(Hook):
    """Hook that appends the iterative state to a 'trajectory' group in HDF5.

    Subclasses decide what additional data (e.g. the system) is dumped.
    """
    def __init__(self, f, start=0, step=1):
        """
        **Argument:**
        f
            A h5.File object to write the trajectory to.
        **Optional arguments:**
        start
            The first iteration at which this hook should be called.
        step
            The hook will be called every `step` iterations.
        """
        self.f = f
        Hook.__init__(self, start, step)
    def __call__(self, iterative):
        # lazily create the trajectory group on the first call
        if 'trajectory' not in self.f:
            self.init_trajectory(iterative)
        tgrp = self.f['trajectory']
        # determine the row to write the current iteration to. If a previous
        # iterations was not completely written, then the last row is reused.
        row = min(tgrp[key].shape[0] for key in iterative.state if key in tgrp.keys())
        for key, item in iterative.state.items():
            # skip unset items and items with an empty shape dimension
            if item.value is None:
                continue
            if len(item.shape) > 0 and min(item.shape) == 0:
                continue
            ds = tgrp[key]
            if ds.shape[0] <= row:
                # do not over-allocate. hdf5 works with chunks internally.
                ds.resize(row+1, axis=0)
            ds[row] = item.value
    def dump_system(self, system, grp):
        # delegate serialization of the system to its own to_hdf5 method
        system.to_hdf5(grp)
    def init_trajectory(self, iterative):
        """Create the 'trajectory' group with one resizable dataset per state item."""
        tgrp = self.f.create_group('trajectory')
        for key, item in iterative.state.items():
            if len(item.shape) > 0 and min(item.shape) == 0:
                continue
            if item.value is None:
                continue
            # the first axis counts iterations and is unbounded
            maxshape = (None,) + item.shape
            shape = (0,) + item.shape
            dset = tgrp.create_dataset(key, shape, maxshape=maxshape, dtype=item.dtype)
            for name, value in item.iter_attrs(iterative):
                tgrp.attrs[name] = value
class HDF5Writer(BaseHDF5Writer):
    """Trajectory writer for MD runs: dumps the system once, then the state each call."""
    def __call__(self, iterative):
        # the system does not change during MD, so dump it only once
        if 'system' not in self.f:
            self.dump_system(iterative.ff.system, self.f)
        BaseHDF5Writer.__call__(self, iterative)
class MCHDF5Writer(BaseHDF5Writer):
    """Write output to hdf5 file during Monte Carlo simulations"""
    def __call__(self, mc):
        assert isinstance(mc, MC)
        # Dump the current configuration; the number of particles is not
        # necessarily fixed during a simulation, so we can't just dump all
        # positions in a giant array as done during MD simulations
        if mc.current_configuration is not None:
            # one snapshot group per MC step, keyed by the step counter
            grp = self.f.require_group("snapshots/%012d"%mc.counter)
            self.dump_system(mc.current_configuration, grp)
        # The standard way of dumping simulation info to the trajectory group
        BaseHDF5Writer.__call__(self, mc)
class XYZWriter(Hook):
    """Hook that appends each call as one frame of an XYZ trajectory file."""
    def __init__(self, fn_xyz, select=None, start=0, step=1):
        """
        **Argument:**
        fn_xyz
            A filename to write the XYZ trajectory too.
        **Optional arguments:**
        select
            A list of atom indexes that should be written to the trajectory
            output. If not given, all atoms are included.
        start
            The first iteration at which this hook should be called.
        step
            The hook will be called every `step` iterations.
        """
        self.fn_xyz = fn_xyz
        self.select = select
        # created lazily on the first call, once atom symbols are known
        self.xyz_writer = None
        Hook.__init__(self, start, step)
    def __call__(self, iterative):
        from molmod import angstrom
        if self.xyz_writer is None:
            # first call: look up element symbols and open the writer
            from molmod.periodic import periodic
            from molmod.io import XYZWriter
            numbers = iterative.ff.system.numbers
            if self.select is None:
                symbols = [periodic[n].symbol for n in numbers]
            else:
                symbols = [periodic[numbers[i]].symbol for i in self.select]
            self.xyz_writer = XYZWriter(self.fn_xyz, symbols)
        # pack counter, potential energy and cell vectors into the title line
        rvecs = iterative.ff.system.cell.rvecs.copy()
        rvecs_string = " ".join([str(x[0]/angstrom) for x in rvecs.reshape((-1,1))])
        title = '%7i E_pot = %.10f %s' % (iterative.counter, iterative.epot, rvecs_string)
        if self.select is None:
            pos = iterative.ff.system.pos
        else:
            pos = iterative.ff.system.pos[self.select]
        self.xyz_writer.dump(title, pos)
class RestartWriter(Hook):
    """Hook that writes everything needed to restart a simulation to HDF5."""
    def __init__(self, f, start=0, step=1000):
        """
        **Argument:**
        f
            A h5.File object to write the restart information to.
        **Optional arguments:**
        start
            The first iteration at which this hook should be called.
        step
            The hook will be called every `step` iterations.
        """
        self.f = f
        # both are filled in by init_state() once the iterative object exists
        self.state = None
        self.default_state = None
        Hook.__init__(self, start, step)
    def init_state(self, iterative):
        """Collect the state items and static data required for a restart."""
        # Basic properties needed for the restart
        self.default_state = [
            AttributeStateItem('counter'),
            AttributeStateItem('time'),
            PosStateItem(),
            AttributeStateItem('vel'),
            CellStateItem(),
            AttributeStateItem('econs'),
            ConsErrStateItem('ekin_m'),
            ConsErrStateItem('ekin_s'),
            ConsErrStateItem('econs_m'),
            ConsErrStateItem('econs_s')
        ]
        # Dump the timestep
        rgrp = self.f.create_group('restart')
        rgrp.create_dataset('timestep', data = iterative.timestep)
        # Verify whether there are any deterministic thermostats / barostats, and add them if present
        thermo = None
        baro = None
        for hook in iterative.hooks:
            if hook.kind == 'deterministic':
                if hook.method == 'thermostat': thermo = hook
                elif hook.method == 'barostat': baro = hook
            elif hook.name == 'TBCombination':
                if hook.thermostat.kind == 'deterministic': thermo = hook.thermostat
                if hook.barostat.kind == 'deterministic': baro = hook.barostat
        if thermo is not None:
            self.dump_restart(thermo)
            if thermo.name == 'NHC':
                # Nose-Hoover chains carry their own positions/velocities
                self.default_state.append(NHCAttributeStateItem('pos'))
                self.default_state.append(NHCAttributeStateItem('vel'))
        if baro is not None:
            self.dump_restart(baro)
            if baro.name == 'MTTK':
                self.default_state.append(MTKAttributeStateItem('vel_press'))
                if baro.baro_thermo is not None:
                    self.default_state.append(MTKAttributeStateItem('chain_pos'))
                    self.default_state.append(MTKAttributeStateItem('chain_vel'))
        # Finalize the restart state items
        self.state_list = [state_item.copy() for state_item in self.default_state]
        self.state = dict((item.key, item) for item in self.state_list)
    def __call__(self, iterative):
        if 'system' not in self.f:
            self.dump_system(iterative.ff.system)
        if 'trajectory' not in self.f:
            self.init_trajectory(iterative)
        tgrp = self.f['trajectory']
        # determine the row to write the current iteration to. If a previous
        # iterations was not completely written, then the last row is reused.
        row = min(tgrp[key].shape[0] for key in self.state if key in tgrp.keys())
        for key, item in self.state.items():
            # skip unset items and items with an empty shape dimension
            if item.value is None:
                continue
            if len(item.shape) > 0 and min(item.shape) == 0:
                continue
            ds = tgrp[key]
            if ds.shape[0] <= row:
                # do not over-allocate. hdf5 works with chunks internally.
                ds.resize(row+1, axis=0)
            ds[row] = item.value
    def dump_system(self, system):
        """Write the system definition to the root of the restart file."""
        system.to_hdf5(self.f)
    def init_trajectory(self, iterative):
        """Create the 'trajectory' group with one resizable dataset per state item."""
        tgrp = self.f.create_group('trajectory')
        for key, item in self.state.items():
            if len(item.shape) > 0 and min(item.shape) == 0:
                continue
            if item.value is None:
                continue
            # the first axis counts iterations and is unbounded
            maxshape = (None,) + item.shape
            shape = (0,) + item.shape
            dset = tgrp.create_dataset(key, shape, maxshape=maxshape, dtype=item.dtype)
            for name, value in item.iter_attrs(iterative):
                tgrp.attrs[name] = value
    def dump_restart(self, hook):
        """Store thermostat/barostat parameters in the 'restart' group."""
        rgrp = self.f['/restart']
        if hook.method == 'thermostat':
            # Dump the thermostat properties
            rgrp.create_dataset('thermo_name', data = hook.name)
            rgrp.create_dataset('thermo_temp', data = hook.temp)
            rgrp.create_dataset('thermo_timecon', data = hook.chain.timecon)
        elif hook.method == 'barostat':
            # Dump the barostat properties
            rgrp.create_dataset('baro_name', data = hook.name)
            rgrp.create_dataset('baro_timecon', data = hook.timecon_press)
            rgrp.create_dataset('baro_temp', data = hook.temp)
            rgrp.create_dataset('baro_press', data = hook.press)
            rgrp.create_dataset('baro_anisotropic', data = hook.anisotropic)
            rgrp.create_dataset('vol_constraint', data = hook.vol_constraint)
            if hook.name == 'Berendsen':
                rgrp.create_dataset('beta', data = hook.beta)
            if hook.name == 'MTTK' and hook.baro_thermo is not None:
                # Dump the barostat thermostat properties
                rgrp.create_dataset('baro_chain_temp', data = hook.baro_thermo.temp)
                rgrp.create_dataset('baro_chain_timecon', data = hook.baro_thermo.chain.timecon)
|
import os
from setuptools import setup, find_packages
# Resolve paths relative to this setup.py so the build works from any cwd.
base_dir = os.path.dirname(__file__)
# Reuse the README as the long description (shown on PyPI).
with open(os.path.join(base_dir, "README.md"), encoding="UTF-8") as stream:
    long_description = stream.read()
setup(
    name="textland",
    version="0.1",
    url="https://github.com/zyga/textland",
    packages=find_packages(),
    author="Zygmunt Krynicki",
    author_email="zygmunt.krynicki@canonical.com",
    license="GPLv3",
    description="Like wayland, for text apps",
    long_description=long_description)
|
import ssl
import SocketServer
import sqlite3
import os
RECEIVE_SIZE = 1024
UPDATE_SERVER_IP_AND_PORT = ('0.0.0.0', 5151)
PARAMETER_DELIMITER = ':'
LENGTH_PARAM = 0
TOKEN_PARAM = 1
OPCODE_PARAM = 2
NOTIFICATION_PARAM = 3
SETTINGS_REQUEST = "1"
NOTIFICATION_UPDATE = "2"
CURRENT_WORKING_DIRECTORY = os.getcwd() # Gives easy access to our server's files' path.
CODE_FILES_PATH = CURRENT_WORKING_DIRECTORY + '\\Templates\\' # The path to where all the server files are saved at
DB_PATH = CODE_FILES_PATH + 'userbase.db'
CLOSE_CONNECTION = "close"
CONTINUE_CONNECTION = "continue"
class UpdateServer(SocketServer.ThreadingMixIn, SocketServer.TCPServer):
    """Threaded TCP server for client settings/notification requests.

    certfile/keyfile/ssl_version are stored for TLS, but the ssl wrap in
    get_request is currently commented out, so connections are plain TCP.
    """
    def __init__(self, server_address, request_handler_class, certfile, keyfile, ssl_version=ssl.PROTOCOL_TLSv1, bind_and_activate=True):
        SocketServer.TCPServer.__init__(self, server_address, request_handler_class, bind_and_activate)
        self.certfile = certfile
        self.keyfile = keyfile
        self.ssl_version = ssl_version
        print "done initializing"
    def get_request(self):
        # accept a raw TCP connection (TLS wrapping disabled below)
        print "hi"
        new_socket, from_address = self.socket.accept()
        # connstream = ssl.wrap_socket(new_socket, server_side=True, keyfile=self.keyfile, certfile=self.certfile, ssl_version=self.ssl_version)
        return new_socket, from_address
class UpdateServerRequestHandler(SocketServer.StreamRequestHandler):
"""
Handles one connection to a client
The request protocl is as following:
LENGTH:TOKEN:OPCODE[:[ADDITIONAL_PARAMS]]
The response is as following:
LENGTH:[ADDITIONAL_DATA]
"""
def handle(self):
while True:
data = self.request.recv(RECEIVE_SIZE)
length = data.split(PARAMETER_DELIMITER)[LENGTH_PARAM]
print data
print length
while int(length) > (RECEIVE_SIZE - len(length) - 1):
data += self.request.recv[RECEIVE_SIZE]
if self.parse_command(data) == CLOSE_CONNECTION:
break
def parse_command(self, data):
data = data.split(PARAMETER_DELIMITER)
token = data[TOKEN_PARAM]
opcode = data[OPCODE_PARAM]
if self.verify_token(token):
if opcode == SETTINGS_REQUEST:
print "it's a settings request"
self.wfile.write(self.build_settings_packet(token))
return CONTINUE_CONNECTION
if opcode == NOTIFICATION_UPDATE:
self.create_notification(data[NOTIFICATION_PARAM])
self.wfile.write('OK')
return CLOSE_CONNECTION
@staticmethod
def build_settings_packet(token):
"""
retrieves the settings related to the user with :param token: and structures a response packet with these
settings.
:param token: a string representing a valid token of a user
:return: a string of a valid response to the client requesting the settings. The string follows the protocol
used by the client to parse responses
"""
db = sqlite3.connect(DB_PATH)
cur = db.cursor()
cur.execute("SELECT processes, file_paths, file_extensions FROM users WHERE token=?", (token,))
settings = cur.fetchone()
processes, file_paths, file_extensions = settings
response_packet = processes + PARAMETER_DELIMITER + file_paths + PARAMETER_DELIMITER + file_extensions
response_packet = str(len(response_packet)) + PARAMETER_DELIMITER + response_packet
print response_packet
return response_packet
@staticmethod
def create_notification(notification_data):
pass
@staticmethod
def verify_token(token):
db = sqlite3.connect(DB_PATH)
cur = db.cursor()
cur.execute("SELECT * FROM users WHERE token=?", (token,))
user_data = cur.fetchone()
if user_data is None:
return False
return True
if __name__ == "__main__":
UpdateServer(('127.0.0.1', 5151), UpdateServerRequestHandler, "cert.pem", "key.pem").serve_forever()
|
from struct import calcsize, unpack
from math import fabs
PRECISION = 1e-8
def cmpfloat(filename_a, filename_b, precision = PRECISION):
    """Compare two binary files of packed 32-bit floats value by value.

    :param filename_a: path of the first file
    :param filename_b: path of the second file
    :param precision: maximum allowed absolute difference per value
    :raises AssertionError: when the files differ in size, are not a
        multiple of sizeof(float), or any pair of values differs by
        ``precision`` or more.
    """
    A = filename_a
    B = filename_b
    sizeof_float = calcsize("f")
    # Fix: open in binary mode (the files hold raw packed floats, not
    # text) and close the handles deterministically via ``with``.
    with open(A, "rb") as fA, open(B, "rb") as fB:
        offset = 0
        while True:
            bA = fA.read(sizeof_float)
            bB = fB.read(sizeof_float)
            assert len(bA) == len(bB), "File %s and %s differ in size." % (A, B)
            if len(bA) == 0:
                # finished reading both files
                break
            # both files can be equal size, but that does not mean we have enough for a float
            assert len(bA) == sizeof_float, "File %s and %s are not a multiple of sizeof(float)." % (A, B)
            # compare one float
            xA, = unpack("f", bA)
            xB, = unpack("f", bB)
            # Fix: honour the ``precision`` argument; the old code always
            # compared against the module-level PRECISION constant.
            assert fabs(xA - xB) < precision, "File %s and %s differ in content at byte offset %s: value is %.5f versus %.5f." % (A, B, offset, xA, xB)
            offset += sizeof_float
if __name__ == "__main__":
import sys
cmpfloat(sys.argv[1], sys.argv[2])
|
import sys, os
import pygtk, gtk, gobject
import pygst
pygst.require("0.10")
import gst
from random import choice
import random
window=int(sys.argv[1], 16)
effects = [
"edgetv",
"agingtv",
"dicetv",
"warptv",
"shagadelictv",
"vertigotv",
"revtv",
"quarktv",
"optv",
"radioactv",
"streaktv",
"rippletv"
]
class Main:
    """Build and start a GStreamer pipeline: webcam -> flip -> random TV effect -> X window."""
    def __init__(self):
        self.pipeline = gst.Pipeline("screensaver")
        self.webcamsrc = gst.element_factory_make("v4l2src", "video0")
        self.webcamflip = gst.element_factory_make("videoflip", "webcamflip")
        # mirror the image horizontally
        self.webcamflip.set_property("method", "horizontal-flip")
        self.colorspaceIN = gst.element_factory_make("ffmpegcolorspace", "colorspace-in")
        self.colorspaceOUT = gst.element_factory_make("ffmpegcolorspace", "colorspace-out")
        # pick one of the TV effects at random for this run
        self.effect = gst.element_factory_make(choice(effects), "effect")
        # NOTE(review): videomix is created but never added to the
        # pipeline below -- confirm whether it is still needed.
        self.videomix = gst.element_factory_make("videomixer", "videomixer")
        self.sink = gst.element_factory_make("xvimagesink", "sink")
        # render into the externally supplied X window id (sys.argv[1])
        self.sink.set_xwindow_id(window)
        self.pipeline.add_many(self.webcamsrc, self.webcamflip, self.colorspaceIN, self.colorspaceOUT, self.effect, self.sink)
        # glue things together
        self.webcamsrc.link(self.webcamflip)
        self.webcamflip.link(self.colorspaceIN)
        self.colorspaceIN.link(self.effect)
        self.effect.link(self.colorspaceOUT)
        self.colorspaceOUT.link(self.sink)
        self.pipeline.set_state(gst.STATE_PLAYING)
start=Main()
gtk.main()
|
"""
AST visitor class to convert Python expressions into C++ as used by ThePEG
"""
import ast
convertHerwig=False
def py2cpp(expr,con=False):
    """Convert expr to C++ form. Wraps the converter class.

    Sets the module-level convertHerwig flag from *con* before parsing.
    """
    global convertHerwig
    convertHerwig = con
    return PyToCpp().parse(expr)
class PyToCppException(Exception):
    """Base class for all PyToCpp exceptions.

    Raised for any Python construct the converter cannot translate.
    """
class PyToCpp(ast.NodeVisitor):
    """Convert Python math expressions into C++.
    Returns a tuple (expr,syms):
    expr -- C++-compatible expression
    syms -- set of all free variables
    Usage:
    >>> expr = '3+2**a*b'
    >>> PyToCpp().parse(expr)
    ('(3.0+(pow(2.0,a)*b))', set(['a', 'b']))
    Note:
    The converter is currently not generic, it relies on the
    conventions of Feynrules' UFO format on the one hand and ThePEG's
    C++ types on the other.
    """
    def parse(self,expression):
        """Convert expression to C++ format."""
        # result accumulates output fragments; symbols collects free variables
        self.result = []
        self.symbols = set()
        # route abs() through cmath so visit_Attribute accepts it
        expression=expression.replace("abs(","cmath.abs(")
        tree = ast.parse(expression)
        #print ast.dump(tree)
        return self.visit(tree)
    ##################################
    def visit_Module(self,node):
        # entry point: visit children, then join the collected fragments
        self.generic_visit(node)
        return ''.join(self.result), self.symbols
    def generic_visit(self,node):
        # fail loudly on any node type without an explicit handler
        typename = type(node).__name__
        harmless = ['Module','Expr']
        if typename not in harmless:
            raise PyToCppException('Missing implementation for %s' % typename)
        super(PyToCpp,self).generic_visit(node)
    def visit_UnaryOp(self,node):
        self.result.append('(')
        self.visit(node.op)
        self.visit(node.operand)
        self.result.append(')')
    def visit_BinOp(self,node):
        # Pow gets special handling (sqr/pow); everything else is infix
        if type(node.op) == type(ast.Pow()):
            return self.pow_node(node)
        self.result.append('(')
        self.visit(node.left)
        self.visit(node.op)
        self.visit(node.right)
        self.result.append(')')
    def pow_node(self,node):
        # x**2 becomes sqr(x); any other exponent becomes pow(x,y)
        if is_square(node):
            self.result.append('sqr(')
            self.visit(node.left)
            self.result.append(')')
        else:
            self.result.append('pow(')
            self.visit(node.left)
            self.result.append(',')
            self.visit(node.right)
            self.result.append(')')
    def visit_Call(self,node):
        # complex(0,1) is emitted as the imaginary unit 'ii'
        if is_ii(node):
            self.result.append('ii')
        else:
            self.visit(node.func)
            self.result.append('(')
            # comma-separate the arguments, then drop the trailing comma
            for a in node.args:
                self.visit(a)
                self.result.append(',')
            if self.result[-1] == ',':
                del self.result[-1]
            self.result.append(')')
    def visit_Attribute(self,node):
        # only cmath.* attributes are allowed; emit the bare function name
        if node.value.id != 'cmath':
            err = "Don't know how to convert %s module." % node.value.id
            raise PyToCppException(err)
        self.result.append(node.attr)
    def visit_Num(self,node):
        # some zeros are encoded as 0j
        if node.n == 0: text = '0.0'
        elif (node.n==complex("1j") ) :
            text = "ii"
        elif (node.n==complex("-1j") ) :
            text = "-ii"
        elif (node.n==complex("2j") ) :
            text = "2.*ii"
        else:
            text = str(float(node.n))
        self.result.append(text)
    def visit_Name(self,node):
        text = str(node.id)
        # map UFO / cmath names onto their ThePEG equivalents
        if text == 'complex':
            text = 'Complex'
        elif text == 'complexconjugate':
            text = 'conj'
        elif text == 'im':
            text = 'imag'
        elif text == 're':
            text = 'real'
        elif text == 'max':
            text = 'max'
        elif text == 'min':
            text = 'min'
        elif convertHerwig :
            # Herwig-specific renames: prefixed kinematic variables become
            # accessor calls (presumably matching the generated C++
            # classes -- verify against the Herwig code generator)
            if text == 'I' :
                text = "ii"
            elif ( text.find("UnitRemoval")==0) :
                text = "%s::%s" % (text[:11],text[11:])
            elif(text[0]=="P" or text[0]=="E" or text[0] == "V") :
                if text[-1] in ["x","y","z","t"] :
                    text = "%s.%s()" % (text[0:-1],text[-1])
            elif(text[0]=="R") :
                text = "%s.%s()" % (text[:-3],text[-3:])
            elif(text[0]=="s") :
                text = "%s.%s()" % (text[:-2],text[-2:])
        elif text not in []:
            # anything else is a free symbol the caller must provide
            self.symbols.add(text)
        self.result.append(text)
    def visit_Mult(self,node):
        self.result.append('*')
    def visit_Add(self,node):
        self.result.append('+')
    def visit_Sub(self,node):
        self.result.append('-')
    def visit_USub(self,node):
        self.result.append('-')
    def visit_UAdd(self,node):
        self.result.append('+')
    def visit_Div(self,node):
        self.result.append('/')
    def visit_Pow(self,node):
        # Pow is handled in visit_BinOp/pow_node; reaching here is a bug
        err = "Shold never get here. BinaryOp catches Pow calls."
        raise PyToCppException(err)
def is_square(node):
    """Return True if the Pow node's exponent is the literal 2.

    Used to emit ``sqr(x)`` instead of ``pow(x,2)``.
    """
    try:
        return node.right.n == 2.0
    except AttributeError:
        # not a binary node, or the exponent is not a plain number literal
        return False
def is_ii(node):
    """Return True if the Call node is ``complex(0,1)``, the imaginary unit."""
    try:
        return ( node.func.id == 'complex'
                 and node.args[0].n == 0
                 and node.args[1].n == 1 )
    except (AttributeError, IndexError):
        # not a plain function call, or not called with two number literals
        return False
if __name__ == "__main__":
import doctest
doctest.testmod()
|
import elements
import copy
import json
import os
class ElementList():
    """Look-up helper around the periodic-table data shipped in ./data.

    Loads element, EDS-line and EELS-edge lists from JSON at construction
    time and exposes sorted/filtered views of them plus per-element
    properties taken from the ``elements`` module.
    """
    def __init__(self):
        self.sorted_atomic_number_symbol_list = None
        self.sorted_eds_line_list = None
        self.element_data_dict = None
        self.elements_rawdata = elements.elements
        self.make_sorted_atomic_number_symbol_list()
        self.make_sorted_eds_line_list()
        self.make_sorted_eels_edge_list()
        self.make_element_data_dict()
    @staticmethod
    def _load_json(filename):
        """Load one JSON file from this package's data directory."""
        filepath = os.path.dirname(os.path.realpath(__file__)) + "/data/"
        # with-statement closes the file even if json.load raises
        with open(filepath + filename, "r") as f:
            return json.load(f)
    def make_sorted_atomic_number_symbol_list(self):
        data = self._load_json("element_list.json")
        self.sorted_atomic_number_symbol_list = data['element_list']
    def make_element_data_dict(self):
        self.element_data_dict = self._load_json("element_data_dict.json")
    def make_sorted_eels_edge_list(self):
        data = self._load_json("eels_edge_list.json")
        self.sorted_eels_edge_list = data['eels_edge_list']
    def make_sorted_eds_line_list(self):
        data = self._load_json("eds_line_list.json")
        self.sorted_eds_line_list = data['eds_line_list']
    def get_element_data(self, element_symbol):
        """Return the full data dict for element_symbol."""
        return(self.element_data_dict[element_symbol])
    def get_element_name(self, element_symbol):
        """Return the element name, capitalized (e.g. 'Iron')."""
        name = self.elements_rawdata[element_symbol]['General_properties']['name']
        return(name[:1].upper() + name[1:].lower())
    def get_element_density(self, element_symbol):
        """Return the density in g/cm^3, rounded to 2 decimals."""
        density = self.elements_rawdata[element_symbol]['Physical_properties']['density (g/cm^3)']
        return(round(density, 2))
    def get_element_atomic_weight(self, element_symbol):
        """Return the atomic weight, rounded to 2 decimals."""
        atomic_weight = self.elements_rawdata[element_symbol]['General_properties']['atomic_weight']
        return(round(atomic_weight, 2))
    def get_atomic_number_symbol_list(self, search=""):
        """Return elements whose symbol, symbol initial or name matches search.

        Fix: the original used ``search is ""`` -- an identity test that
        relies on string interning; use ordinary equality instead.
        """
        if search == "":
            return(self.sorted_atomic_number_symbol_list)
        resultlist = []
        for element in self.sorted_atomic_number_symbol_list:
            if element['symbol'] == search:
                resultlist.append(element)
            elif element['symbol'][0] == search:
                resultlist.append(element)
            elif search in element['name']:
                resultlist.append(element)
        return(resultlist)
    def get_sorted_eds_line_list(self):
        return(self.sorted_eds_line_list)
    def get_sorted_eels_edge_list(self, min_energy=None, max_energy=None):
        # NOTE(review): min_energy/max_energy are currently ignored; the
        # parameters are kept for interface compatibility.
        return(self.sorted_eels_edge_list)
    def _get_unsorted_xray_line_list(self, element_symbol):
        """Return [[name, weight, energy], ...] or None when unavailable."""
        try:
            xray_lines = self.elements_rawdata[element_symbol]['Atomic_properties']['Xray_lines']
            xray_line_list = []
            for xray_line_name, xray_data in xray_lines.items():
                xray_line_weight = round(xray_data['weight'], 2)
                xray_line_energy = round(xray_data['energy (keV)'], 3)
                xray_line_list.append([xray_line_name, xray_line_weight, xray_line_energy])
            return(xray_line_list)
        except KeyError:
            # element has no X-ray line data
            return(None)
    def get_xray_line_list(self, element_symbol):
        """Return X-ray lines as dicts, sorted by energy (keV)."""
        xray_line_list = self._get_unsorted_xray_line_list(element_symbol)
        xray_line_list.sort(key=lambda line: line[2])
        return([{'name': line[0], 'weight': line[1], 'energy': line[2]}
                for line in xray_line_list])
    def _get_unsorted_eels_edge_list(self, element_symbol):
        """Return [[name, factor, energy, relevance], ...] or None when unavailable."""
        try:
            eels_edges = self.elements_rawdata[element_symbol]['Atomic_properties']['Binding_energies']
            eels_edge_list = []
            for eels_edge_name, eels_data in eels_edges.items():
                eels_edge_factor = round(eels_data['factor'], 2)
                eels_edge_energy = eels_data['onset_energy (eV)']
                eels_edge_relevance = eels_data['relevance']
                eels_edge_list.append([eels_edge_name, eels_edge_factor, eels_edge_energy, eels_edge_relevance])
            return(eels_edge_list)
        except KeyError:
            # element has no EELS edge data
            return(None)
    def get_eels_edge_list(self, element_symbol):
        """Return EELS edges as dicts, sorted by onset energy (eV)."""
        eels_edge_list = self._get_unsorted_eels_edge_list(element_symbol)
        eels_edge_list.sort(key=lambda edge: edge[2])
        return([{'name': edge[0], 'factor': edge[1], 'energy': edge[2], 'relevance': edge[3]}
                for edge in eels_edge_list])
element_list = ElementList()
|
"""
**types.py**
**Platform:**
Windows, Linux, Mac Os X.
**Description:**
Defines Application Database types: :class:`IblSet`, :class:`Template`
and :class:`Collection` classes.
**Others:**
"""
from __future__ import unicode_literals
import os
import sqlalchemy.ext.declarative
from sqlalchemy import ForeignKey
import foundations.exceptions
import foundations.parsers
import foundations.verbose
from foundations.parsers import SectionsFileParser
__author__ = "Thomas Mansencal"
__copyright__ = "Copyright (C) 2008 - 2014 - Thomas Mansencal"
__license__ = "GPL V3.0 - http://www.gnu.org/licenses/"
__maintainer__ = "Thomas Mansencal"
__email__ = "thomas.mansencal@gmail.com"
__status__ = "Production"
__all__ = ["LOGGER", "Base", "IblSet", "Template", "Collection"]
LOGGER = foundations.verbose.install_logger()
Base = sqlalchemy.ext.declarative.declarative_base()
class IblSet(Base):
    """
    Defines the Database ibl_sets type.
    """

    __tablename__ = "ibl_sets"
    """
    :param __tablename__: Table name.
    :type __tablename__: unicode
    """

    id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True)
    name = sqlalchemy.Column(sqlalchemy.String)
    path = sqlalchemy.Column(sqlalchemy.String)
    os_stats = sqlalchemy.Column(sqlalchemy.String)
    collection = sqlalchemy.Column(sqlalchemy.Integer, ForeignKey("collections.id"))
    title = sqlalchemy.Column(sqlalchemy.String)
    author = sqlalchemy.Column(sqlalchemy.String)
    link = sqlalchemy.Column(sqlalchemy.String)
    icon = sqlalchemy.Column(sqlalchemy.String)
    preview_image = sqlalchemy.Column(sqlalchemy.String)
    background_image = sqlalchemy.Column(sqlalchemy.String)
    lighting_image = sqlalchemy.Column(sqlalchemy.String)
    reflection_image = sqlalchemy.Column(sqlalchemy.String)
    location = sqlalchemy.Column(sqlalchemy.String)
    latitude = sqlalchemy.Column(sqlalchemy.String)
    longitude = sqlalchemy.Column(sqlalchemy.String)
    date = sqlalchemy.Column(sqlalchemy.String)
    time = sqlalchemy.Column(sqlalchemy.String)
    comment = sqlalchemy.Column(sqlalchemy.String)

    def __init__(self,
                 name=None,
                 path=None,
                 os_stats=None,
                 collection=None,
                 title=None,
                 author=None,
                 link=None,
                 icon=None,
                 preview_image=None,
                 background_image=None,
                 lighting_image=None,
                 reflection_image=None,
                 location=None,
                 latitude=None,
                 longitude=None,
                 date=None,
                 time=None,
                 comment=None):
        """
        Initializes the class.

        :param name: Ibl Set name.
        :type name: unicode
        :param path: Ibl Set file path.
        :type path: unicode
        :param os_stats: Ibl Set file statistics.
        :type os_stats: unicode
        :param collection: Ibl Set collection.
        :type collection: unicode
        :param title: Ibl Set title.
        :type title: unicode
        :param author: Ibl Set author.
        :type author: unicode
        :param link: Ibl Set online link.
        :type link: unicode
        :param icon: Ibl Set icon path.
        :type icon: unicode
        :param preview_image: Ibl Set preview image path.
        :type preview_image: unicode
        :param background_image: Ibl Set background image path.
        :type background_image: unicode
        :param lighting_image: Ibl Set lighting image path.
        :type lighting_image: unicode
        :param reflection_image: Ibl Set reflection image path.
        :type reflection_image: unicode
        :param location: Ibl Set location.
        :type location: unicode
        :param latitude: Ibl Set latitude.
        :type latitude: unicode
        :param longitude: Ibl Set longitude.
        :type longitude: unicode
        :param date: Ibl Set shot date.
        :type date: unicode
        :param time: Ibl Set shot time.
        :type time: unicode
        :param comment: Ibl Set comment.
        :type comment: unicode
        """

        LOGGER.debug("> Initializing '{0}()' class.".format(self.__class__.__name__))

        # --- Setting class attributes. ---
        self.name = name
        self.path = path
        self.os_stats = os_stats
        self.collection = collection
        self.title = title
        self.author = author
        self.link = link
        self.icon = icon
        self.preview_image = preview_image
        self.background_image = background_image
        self.lighting_image = lighting_image
        self.reflection_image = reflection_image
        self.location = location
        self.latitude = latitude
        self.longitude = longitude
        self.date = date
        self.time = time
        self.comment = comment

    def _resolve_image_path(self, sections_file_parser, attribute, section):
        """
        Returns the path stored under *attribute* in *section*, resolved
        against this Ibl Set file's directory, or None when the attribute
        is absent or empty.

        :param sections_file_parser: Parser holding the Ibl Set file content.
        :type sections_file_parser: SectionsFileParser
        :param attribute: Attribute name.
        :type attribute: unicode
        :param section: Section name.
        :type section: unicode
        :return: Normalized absolute path or None.
        :rtype: unicode or None
        """

        value = sections_file_parser.get_value(attribute, section)
        if not value:
            return None
        return os.path.normpath(os.path.join(os.path.dirname(self.path), value))

    @foundations.exceptions.handle_exceptions(foundations.exceptions.FileStructureParsingError)
    def set_content(self):
        """
        Initializes the class attributes by parsing the Ibl Set file.

        :return: Method success.
        :rtype: bool

        :raise foundations.exceptions.FileStructureParsingError: When the
            file contains no sections.
        """

        sections_file_parser = SectionsFileParser(self.path)
        sections_file_parser.parse()

        if not sections_file_parser.sections:
            raise foundations.exceptions.FileStructureParsingError(
                "{0} | '{1}' no sections found, file structure seems invalid!".format(self.__class__.__name__,
                                                                                      self.path))

        self.title = sections_file_parser.get_value("Name", "Header")
        self.author = sections_file_parser.get_value("Author", "Header")
        self.link = sections_file_parser.get_value("Link", "Header")
        # Image attributes are stored as paths relative to the Ibl Set file.
        self.icon = self._resolve_image_path(sections_file_parser, "ICOfile", "Header")
        self.preview_image = self._resolve_image_path(sections_file_parser, "PREVIEWfile", "Header")
        self.background_image = self._resolve_image_path(sections_file_parser, "BGfile", "Background")
        # "Enviroment" (sic) is the section name used by the file format.
        self.lighting_image = self._resolve_image_path(sections_file_parser, "EVfile", "Enviroment")
        self.reflection_image = self._resolve_image_path(sections_file_parser, "REFfile", "Reflection")
        self.location = sections_file_parser.get_value("Location", "Header")
        self.latitude = sections_file_parser.get_value("GEOlat", "Header")
        self.longitude = sections_file_parser.get_value("GEOlong", "Header")
        self.date = sections_file_parser.get_value("Date", "Header")
        self.time = sections_file_parser.get_value("Time", "Header")
        self.comment = sections_file_parser.get_value("Comment", "Header")
        return True
class Template(Base):
    """
    Defines the Database Template type.
    """

    __tablename__ = "templates"
    """
    :param __tablename__: Table name.
    :type __tablename__: unicode
    """

    id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True)
    name = sqlalchemy.Column(sqlalchemy.String)
    path = sqlalchemy.Column(sqlalchemy.String)
    os_stats = sqlalchemy.Column(sqlalchemy.String)
    collection = sqlalchemy.Column(sqlalchemy.Integer, ForeignKey("collections.id"))
    help_file = sqlalchemy.Column(sqlalchemy.String)
    title = sqlalchemy.Column(sqlalchemy.String)
    author = sqlalchemy.Column(sqlalchemy.String)
    email = sqlalchemy.Column(sqlalchemy.String)
    url = sqlalchemy.Column(sqlalchemy.String)
    release = sqlalchemy.Column(sqlalchemy.String)
    date = sqlalchemy.Column(sqlalchemy.String)
    software = sqlalchemy.Column(sqlalchemy.String)
    version = sqlalchemy.Column(sqlalchemy.String)
    renderer = sqlalchemy.Column(sqlalchemy.String)
    output_script = sqlalchemy.Column(sqlalchemy.String)
    comment = sqlalchemy.Column(sqlalchemy.String)

    def __init__(self,
                 name=None,
                 path=None,
                 os_stats=None,
                 collection=None,
                 help_file=None,
                 title=None,
                 author=None,
                 email=None,
                 url=None,
                 release=None,
                 date=None,
                 software=None,
                 version=None,
                 renderer=None,
                 output_script=None,
                 comment=None):
        """
        Initializes the class.

        :param name: Template name.
        :type name: unicode
        :param path: Template file path.
        :type path: unicode
        :param os_stats: Template file statistics.
        :type os_stats: unicode
        :param collection: Template collection.
        :type collection: unicode
        :param help_file: Template help file path.
        :type help_file: unicode
        :param title: Template title.
        :type title: unicode
        :param author: Template author.
        :type author: unicode
        :param email: Template author email.
        :type email: unicode
        :param url: Template online link.
        :type url: unicode
        :param release: Template release version.
        :type release: unicode
        :param date: Template release date.
        :type date: unicode
        :param software: Template target software.
        :type software: unicode
        :param version: Template target software version.
        :type version: unicode
        :param renderer: Template target renderer.
        :type renderer: unicode
        :param output_script: Template loader script name.
        :type output_script: unicode
        :param comment: Template comment.
        :type comment: unicode
        """

        LOGGER.debug("> Initializing '{0}()' class.".format(self.__class__.__name__))

        # --- Setting class attributes. ---
        self.name = name
        self.path = path
        self.os_stats = os_stats
        self.collection = collection
        self.help_file = help_file
        self.title = title
        self.author = author
        self.email = email
        self.url = url
        self.release = release
        self.date = date
        self.software = software
        self.version = version
        self.renderer = renderer
        self.output_script = output_script
        self.comment = comment

    def _get_template_attribute(self, sections_file_parser, attribute):
        """
        Returns the compound attribute value for *attribute* from the
        'Template' section of the parsed Template file.

        :param sections_file_parser: Parser holding the Template file content.
        :type sections_file_parser: SectionsFileParser
        :param attribute: Attribute name.
        :type attribute: unicode
        :return: Attribute value.
        :rtype: unicode
        """

        return foundations.parsers.get_attribute_compound(
            attribute, sections_file_parser.get_value(attribute, "Template")).value

    @foundations.exceptions.handle_exceptions(foundations.exceptions.FileStructureParsingError)
    def set_content(self):
        """
        Initializes the class attributes by parsing the Template file.

        :return: Method success.
        :rtype: bool

        :raise foundations.exceptions.FileStructureParsingError: When the
            file contains no sections.
        """

        sections_file_parser = SectionsFileParser(self.path)
        # Fix: the original passed raw_sections=("Script") which is a plain
        # string, not a tuple; ("Script",) is the intended single-element tuple.
        sections_file_parser.parse(raw_sections=("Script",))

        if not sections_file_parser.sections:
            raise foundations.exceptions.FileStructureParsingError(
                "{0} | '{1}' no sections found, file structure seems invalid!".format(self.__class__.__name__,
                                                                                      self.path))

        # The help file is stored relative to the Template file directory.
        help_file = self._get_template_attribute(sections_file_parser, "HelpFile")
        self.help_file = os.path.join(os.path.dirname(self.path), help_file) if help_file else None
        self.title = self._get_template_attribute(sections_file_parser, "Name")
        self.author = self._get_template_attribute(sections_file_parser, "Author")
        self.email = self._get_template_attribute(sections_file_parser, "Email")
        self.url = self._get_template_attribute(sections_file_parser, "Url")
        self.release = self._get_template_attribute(sections_file_parser, "Release")
        self.date = self._get_template_attribute(sections_file_parser, "Date")
        self.software = self._get_template_attribute(sections_file_parser, "Software")
        self.version = self._get_template_attribute(sections_file_parser, "Version")
        self.renderer = self._get_template_attribute(sections_file_parser, "Renderer")
        self.output_script = self._get_template_attribute(sections_file_parser, "OutputScript")
        self.comment = self._get_template_attribute(sections_file_parser, "Comment")
        return True
class Collection(Base):
    """
    Defines the Database Collection type.
    """

    __tablename__ = "collections"
    """
    :param __tablename__: Table name.
    :type __tablename__: unicode
    """

    id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True)
    name = sqlalchemy.Column(sqlalchemy.String)
    type = sqlalchemy.Column(sqlalchemy.String)
    comment = sqlalchemy.Column(sqlalchemy.String)

    def __init__(self, name=None, type=None, comment=None):
        """
        Initializes the class.

        :param name: Collection name.
        :type name: unicode
        :param type: Collection type.
        :type type: unicode
        :param comment: Collection comment.
        :type comment: unicode
        """

        LOGGER.debug("> Initializing '{0}()' class.".format(self.__class__.__name__))

        # --- Setting class attributes. ---
        for attribute_name, attribute_value in (("name", name),
                                                ("type", type),
                                                ("comment", comment)):
            setattr(self, attribute_name, attribute_value)
|
import math
import sys
from Crypto.PublicKey import RSA
# Generate a throwaway 1024-bit key, then overwrite its parameters with
# fixed primes so the resulting key material is fully deterministic.
keypair = RSA.generate(1024)
keypair.p = 275127860351348928173285174381581152299
keypair.q = 319576316814478949870590164193048041239
keypair.e = 65537
keypair.n = keypair.p * keypair.q
# Euler's totient phi(n) = (p - 1) * (q - 1).  `long` => Python 2 code.
Qn = long((keypair.p-1) * (keypair.q-1))
i = 1
# Search for the smallest x = i * phi(n) + 1 divisible by e; then
# d = x / e (Python 2 integer division) is the modular inverse of e
# mod phi(n), i.e. the RSA private exponent.
while (True):
    x = (Qn * i ) + 1
    if (x % keypair.e == 0):
        keypair.d = x / keypair.e
        break
    i += 1
# Write the private key out in PEM format.
private = open('private.pem','w')
private.write(keypair.exportKey())
private.close()
|
from django.db.models.signals import post_syncdb
from django.contrib.sites import models as sites_models
from shortim.signals import change_site_domain, create_first_shorturl
from shortim import models as shortim_models
# After the sites app's tables are created, point the default Site at
# the configured domain.
post_syncdb.connect(change_site_domain, sender=sites_models)
# After shortim's tables are created, seed the first short URL.
post_syncdb.connect(create_first_shorturl, sender=shortim_models)
|
"""
This module is a client for the TS and manages the transformations associated with the productions
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
__RCSID__ = "$Id $"
import json
from DIRAC import gLogger, S_OK, S_ERROR
from DIRAC.TransformationSystem.Client.TransformationClient import TransformationClient
from DIRAC.ProductionSystem.Client.ProductionClient import ProductionClient
class ProdTransManager(object):
    """Client-side helper managing the transformations associated to productions in the TS."""

    def __init__(self):
        self.transClient = TransformationClient()
        self.prodClient = ProductionClient()

    def deleteTransformations(self, transIDs):
        """Delete given transformations from the TS

        :param list transIDs: a list of Transformation IDs
        :return: S_OK() or the first failing S_ERROR
        """
        gLogger.notice("Deleting transformations %s from the TS" % transIDs)

        for transID in transIDs:
            res = self.transClient.deleteTransformation(transID)
            if not res["OK"]:
                # Stop at the first failure and propagate it to the caller.
                return res

        return S_OK()

    def deleteProductionTransformations(self, prodID):
        """Delete the production transformations from the TS

        Best effort: individual deletion failures are logged, not fatal.

        :param int prodID: the ProductionID
        :return: S_OK() (failures are logged)
        """
        res = self.prodClient.getProductionTransformations(prodID)
        if not res["OK"]:
            # Previously this failure was silently ignored; log it so
            # operators can see why nothing was deleted.
            gLogger.error(res["Message"])
            return S_OK()

        transList = res["Value"]
        gLogger.notice("Deleting production transformations %s from the TS" % transList)
        for trans in transList:
            res = self.transClient.deleteTransformation(trans["TransformationID"])
            if not res["OK"]:
                gLogger.error(res["Message"])

        return S_OK()

    def addTransformationStep(self, stepID, prodID):
        """Add the transformation step to the TS

        :param int stepID: the production step ID
        :param int prodID: the production ID
        :return: S_OK(transformationID) or S_ERROR
        """
        res = self.prodClient.getProductionStep(stepID)
        if not res["OK"]:
            return res
        prodStep = res["Value"]

        gLogger.notice("Add step %s to production %s" % (prodStep[0], prodID))

        # Unpack the positional production-step record.
        stepDesc = prodStep[2]
        stepLongDesc = prodStep[3]
        stepBody = prodStep[4]
        stepType = prodStep[5]
        stepPlugin = prodStep[6]
        stepAgentType = prodStep[7]
        stepGroupsize = prodStep[8]
        stepInputquery = json.loads(prodStep[9])
        stepOutputquery = json.loads(prodStep[10])
        # Transformation names must be unique: prefix with the production ID.
        stepName = "%08d" % prodID + "_" + prodStep[1]

        res = self.transClient.addTransformation(
            stepName,
            stepDesc,
            stepLongDesc,
            stepType,
            stepPlugin,
            stepAgentType,
            "",
            groupSize=stepGroupsize,
            body=stepBody,
            inputMetaQuery=stepInputquery,
            outputMetaQuery=stepOutputquery,
        )
        if not res["OK"]:
            return S_ERROR(res["Message"])

        return S_OK(res["Value"])

    def executeActionOnTransformations(self, prodID, action):
        """Wrapper to start/stop/clean the transformations of a production

        :param int prodID: the production ID
        :param str action: it can be start/stop/clean
        :return: S_OK() or S_ERROR
        """
        # Check if there is any action to do
        if not action:
            return S_OK()

        # Guard against typos: only call methods the client actually has,
        # instead of raising AttributeError.
        method = getattr(self.transClient, action, None)
        if method is None:
            return S_ERROR("Unknown transformation action '%s'" % action)

        # Get the transformations of the production
        res = self.prodClient.getProductionTransformations(prodID)
        if not res["OK"]:
            return res

        transList = res["Value"]
        gLogger.notice("Executing action %s to %s" % (action, transList))

        # Execute the action on each transformation
        for trans in transList:
            res = method(trans["TransformationID"])
            if not res["OK"]:
                return res

        return S_OK()
|
import argparse
import logging
import os.path
import signal
import sys
import dbus.mainloop.glib
import gobject
if not ('/usr/lib/python2.7/site-packages' in sys.path):
sys.path.append('/usr/lib/python2.7/site-packages')
from tpfancod import settings, control
class Tpfancod(object):

    """main tpfancod process"""

    # Runtime state, populated in daemon_main().
    controller = None
    mainloop = None
    # Defaults; several of these are overridden by command-line arguments
    # in parse_command_line_args().
    debug = False
    quiet = False
    no_ibm_thermal = False
    # version
    version = '1.0.0'
    # path to config file
    config_path = '/etc/tpfancod/settings.conf'
    current_profile = 'profile_standard'
    # path to the fan control interface
    ibm_fan = '/proc/acpi/ibm/fan'
    # path to the thermal sensors interface
    ibm_thermal = '/proc/acpi/ibm/thermal'
    # path to the directory that contains profiles
    supplied_profile_dir = '/usr/share/tpfancod-profiles/'
    # path to pid file
    pid_path = '/var/run/tpfancod.pid'
    # poll time
    poll_time = 3500
    # kernel watchdog time
    # the thinkpad_acpi watchdog accepts intervals between 1 and 120 seconds
    # for safety reasons one shouldn't use values higher than 5 seconds
    watchdog_time = 5

    def __init__(self):
        """Configure logging, read the CLI arguments and start control."""
        logging.basicConfig(stream=sys.stdout,
                            format='%(asctime)s - %(levelname)s - %(message)s',
                            datefmt='%Y-%m-%d %H:%M:%S')
        self.logger = logging.getLogger(__name__)
        self.parse_command_line_args()
        self.start_fan_control()

    def parse_command_line_args(self):
        """evaluate command line arguments"""
        parser = argparse.ArgumentParser()
        parser.add_argument('-d', '--debug', help='enable debugging output',
                            action='store_true')
        parser.add_argument('-n', '--noibmthermal', help='use hwmon sensors even if /proc/acpi/ibm/thermal is present',
                            action='store_true')
        parser.add_argument('-q', '--quiet', help='minimize console output',
                            action='store_true')
        parser.add_argument(
            '-c', '--config', help='alternate location for the configuration file')
        parser.add_argument(
            '-P', '--pid', help='alternate location for the PID file of the running process')
        parser.add_argument(
            '-p', '--profiles', help='alternate location for the directory containing fan control profiles')
        args = parser.parse_args()
        self.debug = args.debug
        self.quiet = args.quiet
        self.no_ibm_thermal = args.noibmthermal
        if args.config:
            self.config_path = args.config
        if args.pid:
            self.pid_path = args.pid
        if args.profiles:
            # NOTE(review): this sets `data_dir`, but the profile directory
            # attribute used elsewhere is `supplied_profile_dir` — the -p
            # option appears to have no effect; confirm intended behavior.
            self.data_dir = args.profiles

    def start_fan_control(self):
        """daemon start function"""
        if not self.quiet:
            print 'tpfancod ' + self.version + ' - Copyright (C) 2011-2015 Vladyslav Shtabovenko'
            print 'Copyright (C) 2007-2008 Sebastian Urban'
            print 'This program comes with ABSOLUTELY NO WARRANTY'
            print
            print 'WARNING: THIS PROGRAM MAY DAMAGE YOUR COMPUTER.'
            print '         PROCEED ONLY IF YOU KNOW HOW TO MONITOR SYSTEM TEMPERATURE.'
            print
        if self.debug:
            self.logger.setLevel(logging.DEBUG)
        else:
            self.logger.setLevel(logging.ERROR)
        self.logger.debug('Running in debug mode')
        # Bail out early if the kernel interfaces are missing or we lack
        # the permissions to drive the fan.
        if not self.is_system_suitable():
            print 'Fatal error: unable to set fanspeed, enable watchdog or read temperature'
            print '             Please make sure you are root and a recent'
            print '             thinkpad_acpi module is loaded with fan_control=1'
            print '             If thinkpad_acpi is already loaded, check that'
            print '             /proc/acpi/ibm/thermal exists. Thinkpad models'
            print '             that doesn\'t have this file are currently unsupported'
            exit(1)
        # A stale/live pid file means another instance may be running.
        if os.path.isfile(self.pid_path):
            print 'Fatal error: already running or ' + self.pid_path + ' left behind'
            exit(1)
        # go into daemon mode
        self.daemonize()

    def is_system_suitable(self):
        """returns True iff fan speed setting, watchdog and thermal reading is supported by kernel and
        we have write permissions"""
        try:
            # Probe write access by issuing harmless commands to the fan
            # interface ('level auto' and a watchdog ping).
            fanfile = open(self.ibm_fan, 'w')
            fanfile.write('level auto')
            fanfile.flush()
            fanfile.close()
            fanfile = open(self.ibm_fan, 'w')
            fanfile.write('watchdog 5')
            fanfile.flush()
            fanfile.close()
            return True
        except IOError:
            return False

    def daemonize(self):
        """turns the current process into a daemon"""
        if not self.debug:  # don't go into daemon mode if debug mode is active
            """go into daemon mode"""
            # from: http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/66012
            # do the UNIX double-fork magic, see Stevens' "Advanced
            # Programming in the UNIX Environment" for details (ISBN
            # 0201563177)
            try:
                pid = os.fork()
                if pid > 0:  # exit first parent
                    sys.exit(0)
            except OSError, e:
                print >>sys.stderr, 'fork #1 failed: %d (%s)' % (
                    e.errno, e.strerror)
                sys.exit(1)
            # decouple from parent environment
            os.chdir('/')
            os.setsid()
            os.umask(0)
            # do second fork
            try:
                pid = os.fork()
                if pid > 0:
                    sys.exit(0)
            except OSError, e:
                print >>sys.stderr, 'fork #2 failed: %d (%s)' % (
                    e.errno, e.strerror)
                sys.exit(1)
        # write pid file
        try:
            pidfile = open(self.pid_path, 'w')
            pidfile.write(str(os.getpid()) + '\n')
            pidfile.close()
        except IOError:
            print >>sys.stderr, 'could not write pid-file: ', self.pid_path
            sys.exit(1)
        # start the daemon main loop
        self.daemon_main()

    def daemon_main(self):
        """daemon entry point"""
        # register SIGTERM handler
        signal.signal(signal.SIGTERM, self.term_handler)
        # register d-bus service
        dbus.mainloop.glib.DBusGMainLoop(set_as_default=True)
        system_bus = dbus.SystemBus()
        name = dbus.service.BusName(
            'org.tpfanco.tpfancod', system_bus)
        # create and load configuration
        act_settings = settings.Settings(
            name, '/Settings', self.debug, self.quiet, self.no_ibm_thermal, self.version, self.config_path, self.current_profile, self.ibm_fan, self.ibm_thermal, self.supplied_profile_dir, self.poll_time, self.watchdog_time)
        # create controller
        self.controller = control.Control(
            name, '/Control', act_settings)
        # start glib main loop
        self.mainloop = gobject.MainLoop()
        self.mainloop.run()

    def term_handler(self, signum, frame):
        """handles SIGTERM"""
        # 255 presumably hands fan control back to a safe mode — TODO confirm
        # against control.Control.set_speed semantics.
        self.controller.set_speed(255)
        try:
            os.remove(self.pid_path)
        except:
            # best-effort cleanup; the pid file may already be gone
            pass
        self.mainloop.quit()
def main():
    """Entry point: constructing Tpfancod parses args and starts the daemon."""
    Tpfancod()

if __name__ == '__main__':
    main()
|
from PyQt4.QtGui import QGridLayout, QLabel, QPushButton, QMessageBox
from basedialog import VDialog
class DMessage(VDialog):
    # Joke "virus found" dialog: three buttons (delete / quarantine /
    # release), each showing a humorous refusal message. UI strings are
    # Turkish and intentionally left untranslated.

    def __init__(self, parent):
        VDialog.__init__(self, parent)
        self.resize(450, 150)
        # Message label spans the full top row of the grid.
        self.gLayout = QGridLayout(self)
        self.mesaj = QLabel(self)
        self.gLayout.addWidget(self.mesaj, 0, 0, 1, 3)
        # "Sil" (delete) button, bottom-left.
        self.pSil = QPushButton(self)
        self.pSil.setMinimumSize(100, 0)
        self.pSil.setMaximumSize(100, 25)
        self.gLayout.addWidget(self.pSil, 1, 0, 1, 1)
        # "Birak" (release) button, bottom-right.
        self.pBirak = QPushButton(self)
        self.pBirak.setMinimumSize(100, 0)
        self.pBirak.setMaximumSize(100, 25)
        self.gLayout.addWidget(self.pBirak, 1, 2, 1, 1)
        # "Karantina" (quarantine) button, bottom-center.
        self.pKarantina = QPushButton(self)
        self.pKarantina.setMinimumSize(100, 0)
        self.pKarantina.setMaximumSize(100, 25)
        self.gLayout.addWidget(self.pKarantina, 1, 1, 1, 1)
        # Wire the three buttons to their handlers.
        self.pBirak.clicked.connect(self.birak)
        self.pKarantina.clicked.connect(self.karantina)
        self.pSil.clicked.connect(self.sil)
        self.setWindowTitle(u"Virüs Bulundu!")
        self.mesaj.setText(u"Zmanorka 816 Torsis B bulundu. İkinci derece tehlikleli bir trojan.<br>Sileyim mi, karantinaya mı alayım, bırakayım gitsin mi?")
        self.pSil.setText(u"Sil")
        self.pBirak.setText(u"Bırak Gitsin")
        self.pKarantina.setText(u"Karantinaya Al")

    def karantina(self):
        # "Quarantine" handler: shows a gag refusal and closes the dialog.
        QMessageBox.information(self, u"Alamadım!", u"Tam karantinaya alırken kaçtı!", u"Afferin!")
        self.close()

    def sil(self):
        # "Delete" handler: shows a gag refusal and closes the dialog.
        QMessageBox.information(self, u"Silinmiyor...", u"Silgim bitmiş :(", u"Hay Senin!")
        self.close()

    def birak(self):
        # "Release" handler: shows two message boxes in sequence, then closes.
        QMessageBox.information(self, u"Bırakmam!", u"Böylesi bulunmaz bir daha!", u"Bırak Lan!")
        QMessageBox.information(self, u"Bağırma!", u"Bağırma, adamı hasta etme!<br> Efendi ol canımı ye...", u"Özür dilerim!")
        self.close()

    @staticmethod
    def getOption():
        # NOTE(review): debug placeholder left in by the author; prints a
        # string and returns None — no option is actually returned.
        print "amk"
|
"""Integration: njsscan."""
from pathlib import Path
from nodejsscan import (
settings,
utils,
)
from njsscan.njsscan import NJSScan
from njsscan.settings import (
IGNORE_PATHS,
NODEJS_FILE_EXTENSIONS,
TEMPLATE_FILE_EXTENSIONS,
)
def all_files(path, search=False, term=None):
    """Collect relative paths of scannable files under *path*.

    Only files with supported Node.js/template extensions outside the
    ignored paths are considered. When *search* is true, only files whose
    content contains *term* are returned.
    """
    ignore_paths = IGNORE_PATHS.union(settings.IGNORE_PATHS)
    supported_ext = NODEJS_FILE_EXTENSIONS.union(TEMPLATE_FILE_EXTENSIONS)
    matches = []
    for candidate in Path(path).rglob('*'):
        posix_path = candidate.as_posix()
        wanted = (candidate.is_file()
                  and candidate.suffix in supported_ext
                  and not any(ignored in posix_path for ignored in ignore_paths))
        if not wanted:
            continue
        # Report paths relative to the upload folder, without a leading '/'.
        relative = posix_path.replace(settings.UPLOAD_FOLDER, '', 1)
        if relative.startswith('/'):
            relative = relative.replace('/', '', 1)
        if not search or term in utils.read_file(posix_path):
            matches.append(relative)
    return matches
def call_njsscan(node_source):
    """Run njsscan over *node_source* and return its result mapping."""
    scanner_options = {
        'json': True,
        'check_controls': settings.CHECK_MISSING_CONTROLS,
    }
    return NJSScan([node_source], **scanner_options).scan()
def add_ids(res):
    """Stamp every finding in *res* with a stable sha256-based id."""
    if not res:
        return
    for rule, finding in res.items():
        matched_files = finding.get('files')
        if not matched_files:
            # File-less finding: hash the whole finding payload once.
            res[rule]['id'] = utils.sha256_finding(finding)
            continue
        for match in matched_files:
            match['id'] = utils.sha256_finding({'file': match, 'rule': rule})
def scan(node_source):
    """Run the static analysis and return the annotated result."""
    print('[INFO] Performing Static Analysis')
    result = call_njsscan(node_source)
    # Both result sections get stable finding ids.
    for section in ('nodejs', 'templates'):
        add_ids(result.get(section))
    result['files'] = all_files(node_source)
    return result
|
from django import forms
from django.core import serializers
from django.contrib import admin
from django.contrib.auth import get_user_model
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.forms import UserCreationForm, UserChangeForm
from django.db.models.fields.related import ManyToManyRel
from .models import Customer, Product, Cart
class CustomerChangeForm(UserChangeForm):
    """Admin change form for Customer exposing the cart as a multi-select."""

    class Meta(UserChangeForm.Meta):
        model = Customer

    selected_products = forms.ModelMultipleChoiceField(Product.objects.all()
        ,widget=admin.widgets.FilteredSelectMultiple('Products', False)
        ,required=False)

    def _cart_products(self):
        """Return the products currently in this customer's cart."""
        return [customer_cart.product
                for customer_cart in Cart.objects.filter(customer__user=self.instance.pk)]

    def __init__(self, *args, **kwargs):
        super(CustomerChangeForm, self).__init__(*args, **kwargs)
        if self.instance.pk:
            self.initial['selected_products'] = self._cart_products()
            # Wrap the widget so the admin renders related-object controls.
            self.fields['selected_products'].widget = admin.widgets.RelatedFieldWidgetWrapper(
                self.fields['selected_products'].widget
                ,ManyToManyRel(Product)
                ,admin.site)

    def save(self, *args, **kwargs):
        """Persist the user and synchronize Cart rows with the selection."""
        instance = super(CustomerChangeForm, self).save(*args, **kwargs)
        if instance.pk:
            # Snapshot the cart once instead of re-querying per product
            # (the original rebuilt this queryset on every loop iteration).
            cart_products = self._cart_products()
            selected = self.cleaned_data['selected_products']
            for cart_product in cart_products:
                if cart_product not in selected:
                    # remove a product that has been unselected
                    customer = Customer.objects.get(pk=instance.pk)
                    Cart.objects.filter(customer__pk=customer.pk,
                                        product__pk=cart_product.pk)[0].delete()
            for product in selected:
                if product not in cart_products:
                    # add newly-selected products, storing a JSON snapshot
                    customer = Customer.objects.get(user=instance.pk)
                    saved_product = serializers.serialize(
                        'json', [product], fields=('name', 'attributes'))
                    Cart.objects.create(customer=customer, product=product,
                                        saved_product=saved_product)
        return instance
class CustomerAdmin(UserAdmin):
    # Use the custom form so the cart multi-select is editable.
    form = CustomerChangeForm

    # NOTE(review): this __init__ only delegates to UserAdmin and adds
    # nothing — it looks vestigial; confirm it can be removed.
    def __init__(self, *args, **kwargs):
        super(CustomerAdmin, self).__init__(*args, **kwargs)

    # Admin layout: cart selector gets its own section; the stock
    # UserAdmin sections are collapsed by default.
    fieldsets = (
        (None, {'fields': ('username', 'password')}),
        (('Cart'), {'fields': ('selected_products',)}),
        (('Personal info'), {'classes': ('collapse',)
            ,'fields': ('first_name', 'last_name', 'email')}),
        (('Permissions'), {'classes': ('collapse',)
            ,'fields': ('is_active', 'is_staff', 'is_superuser', 'groups', 'user_permissions')}),
        (('Important dates'), {'classes': ('collapse',)
            ,'fields': ('last_login', 'date_joined')}),
    )
|
"""
Tests for DropShadowFrame wiget.
"""
import math
from AnyQt.QtWidgets import (
QMainWindow, QWidget, QListView, QTextEdit, QHBoxLayout, QToolBar,
QVBoxLayout
)
from AnyQt.QtGui import QColor
from AnyQt.QtCore import Qt, QPoint, QPropertyAnimation, QVariantAnimation
from .. import dropshadow
from .. import test
class TestDropShadow(test.QAppTestCase):
    """Interactive smoke tests for DropShadowFrame under a live QApplication."""

    def test(self):
        # Shadow attached to a QMainWindow's central widget; animations run
        # until qWait() returns.
        lv = QListView()
        mw = QMainWindow()
        # Add two tool bars, the shadow should extend over them.
        mw.addToolBar(Qt.BottomToolBarArea, QToolBar())
        mw.addToolBar(Qt.TopToolBarArea, QToolBar())
        mw.setCentralWidget(lv)
        f = dropshadow.DropShadowFrame(color=Qt.blue, radius=20)
        f.setWidget(lv)
        # The frame re-parents itself next to the widget it shadows.
        self.assertIs(f.parentWidget(), mw)
        self.assertIs(f.widget(), lv)
        mw.show()
        # Endlessly animate the shadow color red -> blue.
        canim = QPropertyAnimation(
            f, b"color_", f,
            startValue=QColor(Qt.red), endValue=QColor(Qt.blue),
            loopCount=-1, duration=2000
        )
        canim.start()
        # Endlessly animate the shadow radius 30 -> 40.
        ranim = QPropertyAnimation(
            f, b"radius_", f, startValue=30, endValue=40, loopCount=-1,
            duration=3000
        )
        ranim.start()
        self.qWait()

    def test1(self):
        # Tool bars that paint nothing: the shadow must still cover them.
        class FT(QToolBar):
            def paintEvent(self, e):
                pass
        w = QMainWindow()
        ftt, ftb = FT(), FT()
        ftt.setFixedHeight(15)
        ftb.setFixedHeight(15)
        w.addToolBar(Qt.TopToolBarArea, ftt)
        w.addToolBar(Qt.BottomToolBarArea, ftb)
        f = dropshadow.DropShadowFrame()
        te = QTextEdit()
        # Central widget with horizontal margins around the text edit.
        c = QWidget()
        c.setLayout(QVBoxLayout())
        c.layout().setContentsMargins(20, 0, 20, 0)
        c.layout().addWidget(te)
        w.setCentralWidget(c)
        f.setWidget(te)
        f.setRadius(15)
        f.setColor(Qt.blue)
        w.show()
        canim = QPropertyAnimation(
            f, b"color_", f,
            startValue=QColor(Qt.red), endValue=QColor(Qt.blue),
            loopCount=-1, duration=2000
        )
        canim.start()
        ranim = QPropertyAnimation(
            f, b"radius_", f, startValue=30, endValue=40, loopCount=-1,
            duration=3000
        )
        ranim.start()
        self.qWait()

    def test_offset(self):
        w = QWidget()
        w.setLayout(QHBoxLayout())
        w.setContentsMargins(30, 30, 30, 30)
        ww = QTextEdit()
        w.layout().addWidget(ww)
        f = dropshadow.DropShadowFrame(radius=20)
        f.setWidget(ww)
        # Animate the shadow offset around a circle of radius 15 px.
        oanim = QVariantAnimation(
            f, startValue=0.0, endValue=2 * math.pi, loopCount=-1,
            duration=2000,
        )
        @oanim.valueChanged.connect
        def _(value):
            f.setOffset(QPoint(int(15 * math.cos(value)),
                               int(15 * math.sin(value))))
        oanim.start()
        w.show()
        self.qWait()
if __name__ == "__main__":
    # Allow running this test module directly.
    test.unittest.main()
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
def DeterminePackageManager(APTCmd, YUMCmd):
    """Determine and return the package manager using the given command strings."""
    # Probe apt first, then yum; the first command that exits with 0 wins.
    for Cmd in (APTCmd, YUMCmd):
        Retval = CoreTools.StartProcess(Cmd, ShowOutput=False)
        if Retval != 0:
            # This manager isn't available; try the next candidate.
            continue
        return "apt-get" if Cmd == APTCmd else "yum"
    # Neither probe succeeded.
    return "Unknown"
def GetFSTabInfo(MountPoint, OSName):
    """Get /etc/fstab info and related info (EFI Partition, /boot partition) for the given OS at the given mountpoint.

    :param MountPoint: root of the OS installation ("" for the running OS).
    :param OSName: name of the OS (unused here; kept for interface compatibility).
    :return: tuple (RawFSTABContents, EFIPartition, BootPartition) where the
        partitions are "Unknown" when not found.
    """
    #Do some setup.
    EFIPartition = "Unknown"
    BootPartition = "Unknown"

    #Read the raw contents of the /etc/fstab file. 'with' guarantees the
    #handle is closed even if reading raises.
    with open(MountPoint+"/etc/fstab", "r") as FSTabFile:
        RawFSTABContents = FSTabFile.read().split("\n")

    #Gather some info from it.
    for Line in RawFSTABContents:
        #Ignore comments and blank/short lines (a lone token or a
        #whitespace-only line previously crashed with IndexError).
        Fields = Line.split()

        if "#" in Line or len(Fields) < 2:
            continue

        #Try to find this OS's EFI and boot partitions (if there are any).
        if Fields[1] == "/boot/efi" or Fields[1] == "/boot":
            Temp = Fields[0]

            #If we have a UUID, convert it into a device node.
            if "UUID=" in Temp:
                UUID = Temp.split("=")[1]

                for Disk in DiskInfo.keys():
                    if DiskInfo[Disk]["UUID"] == UUID:
                        Temp = Disk
                        break

            #In case we had a UUID with no match, check again before using it, else ignore it.
            if "/dev/" in Temp:
                Disk = Temp
            else:
                Disk = "Unknown"

            #Record whether this is the EFI or the /boot partition.
            if Fields[1] == "/boot/efi":
                EFIPartition = Disk
            elif Fields[1] == "/boot":
                BootPartition = Disk

    #Return stuff.
    return (RawFSTABContents, EFIPartition, BootPartition)
def DetermineOSArchitecture(MountPoint):
    """Look for OS architecture on given partition."""
    #Do setup.
    OSArch = None
    Cmd = "arch"

    while True:
        #Prefix the current probe command with chroot when examining a
        #non-running OS. Cmd is reset to a plain command below before any
        #retry, so the chroot prefix is never doubled.
        if MountPoint != "":
            Cmd = "chroot "+MountPoint+" "+Cmd

        Retval, OSArch = CoreTools.StartProcess(Cmd, ReturnOutput=True)

        #If the command failed, try a second approach.
        if Retval != 0 and "arch" in Cmd:
            Cmd = "file /sbin/init"

        elif Retval != 0:
            OSArch = None
            break

        else:
            break

    #If the command that worked was 'arch', or both failed, we can just return it.
    if "arch" in Cmd or Retval != 0:
        #Return the arch (or None, if we didn't find it).
        return OSArch

    else:
        #Map 'file /sbin/init' output onto an architecture string.
        #NOTE(review): anything that isn't "32-bit" is reported as x86_64,
        #so non-x86 architectures would be misreported — confirm intended scope.
        if "32-bit" in OSArch:
            OSArch = "i386"

        else:
            OSArch = "x86_64"

        return OSArch
def GetOSNameWithLSB(Partition, MountPoint, IsCurrentOS):
    """Attempt to get an OS's name using lsb_release -sd as a fallback."""
    # Run inside a chroot when probing an OS other than the running one.
    Cmd = "lsb_release -sd" if IsCurrentOS else "chroot "+MountPoint+" lsb_release -sd"
    Retval, Output = CoreTools.StartProcess(Cmd, ShowOutput=False, ReturnOutput=True)
    # A failed command or empty output means the name is unavailable.
    return "Unknown" if Retval != 0 or Output == "" else Output
|
import os, sys, traceback
from repository.forms import BaseForm
from django.forms.models import ModelChoiceField
from django.core.exceptions import ValidationError
from repository.models import Snippet
class SnippetForm(BaseForm):
    """Model form for creating and editing Snippet objects."""

    class Meta:
        model = Snippet
        fields = ['name', 'description', 'same_as', # Thing
                  'license', 'keywords', # CreativeWork
                  'body' # Article
                  ]

    def save(self, owner, commit=True, *args, **kwargs):
        """Save the form's instance.

        :param owner: accepted for interface compatibility (not stored here).
        :param commit: when True, persist the instance immediately.
        :return: the (possibly unsaved) model instance.
        """
        # Fix: super(self.__class__, ...) recurses infinitely if this class
        # is ever subclassed; name the class explicitly instead.
        obj = super(SnippetForm, self).save(commit=False, *args, **kwargs)
        if commit:
            obj.save()
        return obj
|
from __future__ import unicode_literals
from django import forms
class BaseEditor(object):
    """Editors should inherit from this. See wiki.editors for examples."""
    # The editor id can be used for conditional testing. If you write your
    # own editor class, you can use the same editor_id as some editor
    editor_id = 'plaintext'
    # Extra media bundles (CSS/JS) for admin and frontend pages; subclasses
    # override these with tuples of asset paths.
    media_admin = ()
    media_frontend = ()
    def __init__(self, instance=None):
        # instance: the model object being edited, if any (may stay None).
        self.instance = instance
    def get_admin_widget(self):
        # Plain <textarea>; subclasses return a rich-editor widget instead.
        return forms.Textarea()
    class AdminMedia:
        # Django-style inner media declaration consumed by admin forms.
        css = {}
        js = ()
    class Media:
        # Django-style inner media declaration consumed by frontend forms.
        css = {}
        js = ()
|
"""
Description : Open gtk
Authors: Benoit Paquet
Date : October 2012
"""
from CapraVision.server.core.manager import VisionManager
from CapraVision.server.tcp_server import Server
from CapraVision.client.controller.controllerProtobuf import ControllerProtobuf
def run():
    """Entry point for the (currently unused) GTK vision client.

    Connects directly to the vision server and starts the GTK main loop,
    closing the server connection when the loop exits.
    """
    # CLASS NOT CURRENTLY USED
    # Protobuf alternative:
    #c = ControllerProtobuf()
    # BUG FIX: this was a Python-2-only `print "..."` statement, inconsistent
    # with the `print(...)` call below and a SyntaxError under Python 3.
    # Single-argument print() behaves identically on Python 2.
    print("In main/maingtk. Do you really want to be here?")
    # Directly connected to the vision server
    c = VisionManager()
    if not c.is_connected():
        print("Vision server is not accessible.")
        return
    #server = Server()
    # server.start("127.0.0.1", 5030)
    # add observer output for "server"
    #c.add_filter_output_observer(server.send)
    # GTK imports are local so the module can be imported without GTK present.
    from gi.repository import Gtk, GObject
    import CapraVision.client.gtk.main
    GObject.threads_init()
    # w = CapraVision.client.gtk.main.WinFilterChain(c)
    #w.window.show_all()
    Gtk.main()
    # Close connection
    #server.stop()
    c.close_server()
if __name__ == '__main__':
    # Project path is parent directory
    import os
    # Walk four levels up from this file so the project root (containing the
    # CapraVision package) is importable when the script is launched directly.
    parentdir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
    # NOTE(review): os.sys works only because the os module happens to import
    # sys; a plain "import sys" would be the conventional spelling.
    os.sys.path.insert(0, parentdir)
    run()
|
"""
:mod: ReqClient
.. module: ReqClient
:synopsis: implementation of client for RequestDB using DISET framework
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import six
import os
import time
import random
import json
import datetime
from DIRAC import gLogger, S_OK, S_ERROR
from DIRAC.Core.Utilities.List import randomize, fromChar
from DIRAC.Core.Utilities.JEncode import strToIntDict
from DIRAC.Core.Utilities.DEncode import ignoreEncodeWarning
from DIRAC.ConfigurationSystem.Client import PathFinder
from DIRAC.Core.Base.Client import Client, createClient
from DIRAC.RequestManagementSystem.Client.Request import Request
from DIRAC.RequestManagementSystem.private.RequestValidator import RequestValidator
from DIRAC.WorkloadManagementSystem.Client import JobStatus
from DIRAC.WorkloadManagementSystem.Client import JobMinorStatus
from DIRAC.WorkloadManagementSystem.Client.JobStateUpdateClient import JobStateUpdateClient
from DIRAC.WorkloadManagementSystem.Client.JobMonitoringClient import JobMonitoringClient
@createClient("RequestManagement/ReqManager")
class ReqClient(Client):
    """Client-side interface for manipulating and operating on Requests."""
    # Cache of ReqProxy clients, keyed by proxy URL (lazily populated).
    __requestProxiesDict = {}
    # Lazily created RequestValidator instance shared by all calls.
    __requestValidator = None
    def __init__(self, url=None, **kwargs):
        """c'tor
        :param self: self reference
        :param url: url of the ReqManager
        :param kwargs: forwarded to the Base Client class
        """
        super(ReqClient, self).__init__(**kwargs)
        self.serverURL = "RequestManagement/ReqManager" if not url else url
        self.log = gLogger.getSubLogger("RequestManagement/ReqClient/pid_%s" % (os.getpid()))
    def requestProxies(self, timeout=120):
        """get request proxies dict"""
        # Forward all the connection options to the requestClient
        # (e.g. the userDN to use)
        kwargs = self.getClientKWArgs()
        kwargs["timeout"] = timeout
        if not self.__requestProxiesDict:
            self.__requestProxiesDict = {}
            proxiesURLs = fromChar(PathFinder.getServiceURL("RequestManagement/ReqProxyURLs"))
            if not proxiesURLs:
                self.log.warn("CS option RequestManagement/ReqProxyURLs is not set!")
            for proxyURL in proxiesURLs:
                self.log.debug("creating RequestProxy for url = %s" % proxyURL)
                pc = Client(**kwargs)
                pc.setServer(proxyURL)
                self.__requestProxiesDict[proxyURL] = pc
        return self.__requestProxiesDict
    def requestValidator(self):
        """get request validator"""
        if not self.__requestValidator:
            self.__requestValidator = RequestValidator()
        return self.__requestValidator
    def putRequest(self, request, useFailoverProxy=True, retryMainService=0):
        """Put request to RequestManager
        :param self: self reference
        :param ~Request.Request request: Request instance
        :param bool useFailoverProxy: if False, will not attempt to forward the request to ReqProxies
        :param int retryMainService: Amount of time we retry on the main ReqHandler in case of failures
        :return: S_OK/S_ERROR
        """
        errorsDict = {"OK": False}
        valid = self.requestValidator().validate(request)
        if not valid["OK"]:
            self.log.error("putRequest: request not valid", "%s" % valid["Message"])
            return valid
        # # dump to json
        requestJSON = request.toJSON()
        if not requestJSON["OK"]:
            return requestJSON
        requestJSON = requestJSON["Value"]
        # Try the main ReqManager first, retrying with a small random backoff.
        retryMainService += 1
        while retryMainService:
            retryMainService -= 1
            setRequestMgr = self._getRPC().putRequest(requestJSON)
            if setRequestMgr["OK"]:
                return setRequestMgr
            errorsDict["RequestManager"] = setRequestMgr["Message"]
            # sleep a bit
            time.sleep(random.randint(1, 5))
        self.log.warn(
            "putRequest: unable to set request '%s' at RequestManager" % request.RequestName, setRequestMgr["Message"]
        )
        # Fall back to the ReqProxies (in random order) unless disabled.
        proxies = self.requestProxies() if useFailoverProxy else {}
        for proxyURL in randomize(proxies.keys()):
            proxyClient = proxies[proxyURL]
            self.log.debug("putRequest: trying RequestProxy at %s" % proxyURL)
            setRequestProxy = proxyClient.putRequest(requestJSON)
            if setRequestProxy["OK"]:
                if setRequestProxy["Value"]["set"]:
                    self.log.info(
                        "putRequest: request '%s' successfully set using RequestProxy %s"
                        % (request.RequestName, proxyURL)
                    )
                elif setRequestProxy["Value"]["saved"]:
                    self.log.info(
                        "putRequest: request '%s' successfully forwarded to RequestProxy %s"
                        % (request.RequestName, proxyURL)
                    )
                return setRequestProxy
            else:
                self.log.warn(
                    "putRequest: unable to set request using RequestProxy %s: %s"
                    % (proxyURL, setRequestProxy["Message"])
                )
                errorsDict["RequestProxy(%s)" % proxyURL] = setRequestProxy["Message"]
        # # if we're here neither requestManager nor requestProxy were successful
        self.log.error("putRequest: unable to set request", "'%s'" % request.RequestName)
        errorsDict["Message"] = "ReqClient.putRequest: unable to set request '%s'" % request.RequestName
        return errorsDict
    def getRequest(self, requestID=0):
        """Get request from RequestDB
        :param self: self reference
        :param int requestID: ID of the request. If 0, choice is made for you
        :return: S_OK( Request instance ) or S_OK() or S_ERROR
        """
        self.log.debug("getRequest: attempting to get request.")
        getRequest = self._getRPC().getRequest(requestID)
        if not getRequest["OK"]:
            self.log.error("getRequest: unable to get request", "'%s' %s" % (requestID, getRequest["Message"]))
            return getRequest
        if not getRequest["Value"]:
            return getRequest
        return S_OK(Request(getRequest["Value"]))
    @ignoreEncodeWarning
    def getBulkRequests(self, numberOfRequest=10, assigned=True):
        """get bulk requests from RequestDB
        :param self: self reference
        :param str numberOfRequest: size of the bulk (default 10)
        :return: S_OK( Successful : { requestID, RequestInstance }, Failed : message ) or S_ERROR
        """
        self.log.debug("getRequests: attempting to get request.")
        getRequests = self._getRPC().getBulkRequests(numberOfRequest, assigned)
        if not getRequests["OK"]:
            self.log.error("getRequests: unable to get '%s' requests: %s" % (numberOfRequest, getRequests["Message"]))
            return getRequests
        # No Request returned
        if not getRequests["Value"]:
            return getRequests
        # No successful Request
        if not getRequests["Value"]["Successful"]:
            return getRequests
        jsonReq = getRequests["Value"]["Successful"]
        # Do not forget to cast back str keys to int
        reqInstances = {int(rId): Request(jsonReq[rId]) for rId in jsonReq}
        failed = strToIntDict(getRequests["Value"]["Failed"])
        return S_OK({"Successful": reqInstances, "Failed": failed})
    def peekRequest(self, requestID):
        """peek request (read without assigning it to an agent)"""
        self.log.debug("peekRequest: attempting to get request.")
        peekRequest = self._getRPC().peekRequest(int(requestID))
        if not peekRequest["OK"]:
            self.log.error(
                "peekRequest: unable to peek request", "request: '%s' %s" % (requestID, peekRequest["Message"])
            )
            return peekRequest
        if not peekRequest["Value"]:
            return peekRequest
        return S_OK(Request(peekRequest["Value"]))
    def deleteRequest(self, requestID):
        """delete request given it's ID
        :param self: self reference
        :param str requestID: request ID
        """
        requestID = int(requestID)
        self.log.debug("deleteRequest: attempt to delete '%s' request" % requestID)
        deleteRequest = self._getRPC().deleteRequest(requestID)
        if not deleteRequest["OK"]:
            self.log.error(
                "deleteRequest: unable to delete request", "'%s' request: %s" % (requestID, deleteRequest["Message"])
            )
        return deleteRequest
    def getRequestIDsList(self, statusList=None, limit=None, since=None, until=None, getJobID=False):
        """get at most :limit: request ids with statuses in :statusList:"""
        # Defaults: final states, at most 100 ids, unrestricted date range.
        statusList = statusList if statusList else list(Request.FINAL_STATES)
        limit = limit if limit else 100
        since = since.strftime("%Y-%m-%d") if since else ""
        until = until.strftime("%Y-%m-%d") if until else ""
        return self._getRPC().getRequestIDsList(statusList, limit, since, until, getJobID)
    def getScheduledRequest(self, operationID):
        """get scheduled request given its scheduled OperationID"""
        self.log.debug("getScheduledRequest: attempt to get scheduled request...")
        scheduled = self._getRPC().getScheduledRequest(operationID)
        if not scheduled["OK"]:
            self.log.error("getScheduledRequest failed", scheduled["Message"])
            return scheduled
        if scheduled["Value"]:
            return S_OK(Request(scheduled["Value"]))
        return scheduled
    def getDBSummary(self):
        """Get the summary of requests in the RequestDBs."""
        self.log.debug("getDBSummary: attempting to get RequestDB summary.")
        dbSummary = self._getRPC().getDBSummary()
        if not dbSummary["OK"]:
            self.log.error("getDBSummary: unable to get RequestDB summary", dbSummary["Message"])
        return dbSummary
    def getDigest(self, requestID):
        """Get the request digest given a request ID.
        :param self: self reference
        :param str requestID: request id
        """
        self.log.debug("getDigest: attempting to get digest for '%s' request." % requestID)
        digest = self._getRPC().getDigest(int(requestID))
        if not digest["OK"]:
            self.log.error(
                "getDigest: unable to get digest for request", "request: '%s' %s" % (requestID, digest["Message"])
            )
        return digest
    def getRequestStatus(self, requestID):
        """Get the request status given a request id.
        :param self: self reference
        :param int requestID: id of the request
        """
        if isinstance(requestID, six.string_types):
            requestID = int(requestID)
        self.log.debug("getRequestStatus: attempting to get status for '%d' request." % requestID)
        requestStatus = self._getRPC().getRequestStatus(requestID)
        if not requestStatus["OK"]:
            self.log.error(
                "getRequestStatus: unable to get status for request",
                ": '%d' %s" % (requestID, requestStatus["Message"]),
            )
        return requestStatus
    # def getRequestName( self, requestID ):
    #   """ get request name for a given requestID """
    #   return self._getRPC().getRequestName( requestID )
    def getRequestInfo(self, requestID):
        """Get the request info given a request id.
        :param self: self reference
        :param int requestID: request id
        """
        self.log.debug("getRequestInfo: attempting to get info for '%s' request." % requestID)
        requestInfo = self._getRPC().getRequestInfo(int(requestID))
        if not requestInfo["OK"]:
            self.log.error(
                "getRequestInfo: unable to get status for request",
                "request: '%s' %s" % (requestID, requestInfo["Message"]),
            )
        return requestInfo
    def getRequestFileStatus(self, requestID, lfns):
        """Get file status for request given a request id.
        :param self: self reference
        :param int requestID: request id
        :param lfns: list of LFNs
        :type lfns: python:list
        """
        self.log.debug("getRequestFileStatus: attempting to get file statuses for '%s' request." % requestID)
        fileStatus = self._getRPC().getRequestFileStatus(int(requestID), lfns)
        if not fileStatus["OK"]:
            self.log.verbose(
                "getRequestFileStatus: unable to get file status for request",
                "request: '%s' %s" % (requestID, fileStatus["Message"]),
            )
        return fileStatus
    def finalizeRequest(self, requestID, jobID, useCertificates=True):
        """check request status and perform finalization if necessary
        update the request status and the corresponding job parameter
        :param self: self reference
        :param str requestID: request id
        :param int jobID: job id
        """
        stateServer = JobStateUpdateClient(useCertificates=useCertificates)
        # Checking if to update the job status - we should fail here, so it will be re-tried later
        # Checking the state, first
        res = self.getRequestStatus(requestID)
        if not res["OK"]:
            self.log.error(
                "finalizeRequest: failed to get request", "request: %s status: %s" % (requestID, res["Message"])
            )
            return res
        if res["Value"] != "Done":
            return S_ERROR(
                "The request %s isn't 'Done' but '%s', this should never happen, why are we here?"
                % (requestID, res["Value"])
            )
        # The request is 'Done', let's update the job status. If we fail, we should re-try later
        monitorServer = JobMonitoringClient(useCertificates=useCertificates)
        res = monitorServer.getJobSummary(int(jobID))
        if not res["OK"]:
            self.log.error("finalizeRequest: Failed to get job status", "JobID: %d" % jobID)
            return res
        elif not res["Value"]:
            self.log.info("finalizeRequest: job %d does not exist (anymore): finalizing" % jobID)
            return S_OK()
        else:
            jobStatus = res["Value"]["Status"]
            jobMinorStatus = res["Value"]["MinorStatus"]
            jobAppStatus = ""
            newJobStatus = ""
            if jobStatus == JobStatus.STALLED:
                # If job is stalled, find the previous status from the logging info
                res = monitorServer.getJobLoggingInfo(int(jobID))
                if not res["OK"]:
                    self.log.error("finalizeRequest: Failed to get job logging info", "JobID: %d" % jobID)
                    return res
                # Check the last status was Stalled and get the one before
                if len(res["Value"]) >= 2 and res["Value"][-1][0] == JobStatus.STALLED:
                    jobStatus, jobMinorStatus, jobAppStatus = res["Value"][-2][:3]
                    newJobStatus = jobStatus
            # update the job pending request digest in any case since it is modified
            self.log.info("finalizeRequest: Updating request digest for job %d" % jobID)
            digest = self.getDigest(requestID)
            if digest["OK"]:
                digest = digest["Value"]
                self.log.verbose(digest)
                res = stateServer.setJobParameter(jobID, "PendingRequest", digest)
                if not res["OK"]:
                    self.log.info("finalizeRequest: Failed to set job %d parameter: %s" % (jobID, res["Message"]))
                    return res
            else:
                self.log.error(
                    "finalizeRequest: Failed to get request digest for %s: %s" % (requestID, digest["Message"])
                )
            if jobStatus == JobStatus.COMPLETED:
                # What to do? Depends on what we have in the minorStatus
                if jobMinorStatus == JobMinorStatus.PENDING_REQUESTS:
                    newJobStatus = JobStatus.DONE
                elif jobMinorStatus == JobMinorStatus.APP_ERRORS:
                    newJobStatus = JobStatus.FAILED
                elif jobMinorStatus == JobMinorStatus.MARKED_FOR_TERMINATION:
                    # If the job has been Killed, set it Killed
                    newJobStatus = JobStatus.KILLED
                else:
                    self.log.error(
                        "finalizeRequest: Unexpected jobMinorStatus", "for %d (got %s)" % (jobID, jobMinorStatus)
                    )
                    return S_ERROR("Unexpected jobMinorStatus")
            if newJobStatus:
                self.log.info("finalizeRequest: Updating job status for %d to %s/Requests done" % (jobID, newJobStatus))
            else:
                self.log.info(
                    "finalizeRequest: Updating job minor status",
                    "for %d to 'Requests done' (current status is %s)" % (jobID, jobStatus),
                )
            stateUpdate = stateServer.setJobStatus(jobID, newJobStatus, "Requests done", "RMS")
            if jobAppStatus and stateUpdate["OK"]:
                stateUpdate = stateServer.setJobApplicationStatus(jobID, jobAppStatus, "RMS")
            if not stateUpdate["OK"]:
                self.log.error(
                    "finalizeRequest: Failed to set job status",
                    "JobID: %d, error: %s" % (jobID, stateUpdate["Message"]),
                )
                return stateUpdate
        return S_OK(newJobStatus)
    @ignoreEncodeWarning
    def getRequestIDsForJobs(self, jobIDs):
        """get the request ids for the supplied jobIDs.
        :param self: self reference
        :param list jobIDs: list of job IDs (integers)
        :return: S_ERROR or S_OK( "Successful": { jobID1: reqID1, jobID2: requID2, ... },
                 "Failed" : { jobIDn: errMsg, jobIDm: errMsg, ...} )
        """
        self.log.verbose("getRequestIDsForJobs: attempt to get request(s) for jobs", "(n=%d)" % len(jobIDs))
        res = self._getRPC().getRequestIDsForJobs(jobIDs)
        if not res["OK"]:
            self.log.error(
                "getRequestIDsForJobs: unable to get request(s) for jobs", "%s: %s" % (jobIDs, res["Message"])
            )
            return res
        # Cast the JobIDs back to int
        successful = strToIntDict(res["Value"]["Successful"])
        failed = strToIntDict(res["Value"]["Failed"])
        return S_OK({"Successful": successful, "Failed": failed})
    @ignoreEncodeWarning
    def readRequestsForJobs(self, jobIDs):
        """read requests for jobs
        :param jobIDs: list with jobIDs
        :type jobIDs: python:list
        :return: S_OK( { "Successful" : { jobID1 : Request, ... },
                 "Failed" : { jobIDn : "Fail reason" } } )
        """
        readReqsForJobs = self._getRPC().readRequestsForJobs(jobIDs)
        if not readReqsForJobs["OK"]:
            return readReqsForJobs
        ret = readReqsForJobs["Value"]
        # # create Requests out of JSONs for successful reads
        # Do not forget to cast back str keys to int
        successful = {int(jobID): Request(jsonReq) for jobID, jsonReq in ret["Successful"].items()}
        failed = strToIntDict(ret["Failed"])
        return S_OK({"Successful": successful, "Failed": failed})
    def resetFailedRequest(self, requestID, allR=False):
        """Reset a failed request to "Waiting" status"""
        # # we can safely only peek the request as it is Failed and therefore not owned by an agent
        res = self.peekRequest(requestID)
        if not res["OK"]:
            return res
        req = res["Value"]
        if allR or recoverableRequest(req):
            # Only reset requests that can be recovered
            if req.Status != "Failed":
                gLogger.notice("Reset NotBefore time, was %s" % str(req.NotBefore))
            else:
                # Reset every Failed file of every Failed operation to Waiting.
                for i, op in enumerate(req):
                    op.Error = ""
                    if op.Status == "Failed":
                        printOperation((i, op), onlyFailed=True)
                    for fi in op:
                        if fi.Status == "Failed":
                            fi.Attempt = 1
                            fi.Error = ""
                            fi.Status = "Waiting"
                    if op.Status == "Failed":
                        op.Status = "Waiting"
            # Reset also NotBefore
            req.NotBefore = datetime.datetime.utcnow().replace(microsecond=0)
            return self.putRequest(req)
        return S_OK("Not reset")
# Module-level accumulator used by prettyPrint/printRequest/printOperation.
output = ""
def prettyPrint(mainItem, key="", offset=0):
    """Recursively render mainItem into the module-global `output` string."""
    global output
    if key:
        key += ": "
    blanks = offset * " "
    if mainItem and isinstance(mainItem, dict):
        output += "%s%s%s\n" % (blanks, key, "{") if blanks or key else ""
        for key in sorted(mainItem):
            prettyPrint(mainItem[key], key=key, offset=offset)
        output += "%s%s\n" % (blanks, "}") if blanks else ""
    elif mainItem and isinstance(mainItem, list) or isinstance(mainItem, tuple):
        # NOTE(review): precedence makes this "(mainItem and isinstance(list))
        # or isinstance(tuple)", so an empty tuple takes this branch while an
        # empty list falls through to the generic case -- confirm intended.
        output += "%s%s%s\n" % (blanks, key, "[" if isinstance(mainItem, list) else "(")
        for item in mainItem:
            prettyPrint(item, offset=offset + 2)
        output += "%s%s\n" % (blanks, "]" if isinstance(mainItem, list) else ")")
    elif isinstance(mainItem, six.string_types):
        if "\n" in mainItem:
            # Multi-line strings are rendered line by line, as a list.
            prettyPrint(mainItem.strip("\n").split("\n"), offset=offset)
        else:
            output += "%s%s'%s'\n" % (blanks, key, mainItem)
    else:
        output += "%s%s%s\n" % (blanks, key, str(mainItem))
    # Collapse bracket/brace pairs that ended up on separate lines.
    output = (
        output.replace("[\n%s{" % blanks, "[{")
        .replace("}\n%s]" % blanks, "}]")
        .replace("(\n%s{" % blanks, "({")
        .replace("}\n%s)" % blanks, "})")
        .replace("(\n%s(" % blanks, "((")
        .replace(")\n%s)" % blanks, "))")
        .replace("(\n%s[" % blanks, "[")
        .replace("]\n%s)" % blanks, "]")
    )
def printFTSJobs(request):
    """Prints the FTSJobs associated to a request
    :param request: Request object
    """
    try:
        if request.RequestID:
            # We try first the new FTS3 system
            from DIRAC.DataManagementSystem.Client.FTS3Client import FTS3Client
            fts3Client = FTS3Client()
            res = fts3Client.ping()
            if res["OK"]:
                # Collect the FTS jobs of every operation of the request.
                associatedFTS3Jobs = []
                for op in request:
                    res = fts3Client.getOperationsFromRMSOpID(op.OperationID)
                    if res["OK"]:
                        for fts3Op in res["Value"]:
                            associatedFTS3Jobs.extend(fts3Op.ftsJobs)
                if associatedFTS3Jobs:
                    # Display the direct url and the status
                    gLogger.always(
                        "\n\nFTS3 jobs associated: \n%s"
                        % "\n".join(
                            "%s/fts3/ftsmon/#/job/%s (%s)"
                            % (
                                job.ftsServer.replace(":8446", ":8449"),  # Submission port is 8446, web port is 8449
                                job.ftsGUID,
                                job.status,
                            )
                            for job in associatedFTS3Jobs
                        )
                    )
                return
    # AttributeError can be thrown because the deserialization will not have
    # happened correctly on the new fts3 (CC7 typically), and the error is not
    # properly propagated
    except AttributeError as err:
        gLogger.debug("Could not instantiate FtsClient because of Exception", repr(err))
def printRequest(request, status=None, full=False, verbose=True, terse=False):
    """Print a human-readable summary of a request and its operations.

    :param status: status as stored in the DB, shown if it differs from the
        in-memory request status
    :param full: dump the full JSON of the request instead of a summary
    :param verbose: forwarded to printOperation (pretty-print arguments)
    :param terse: only show Failed operations/files, and skip FTS jobs
    """
    global output
    if full:
        output = ""
        prettyPrint(json.loads(request.toJSON()["Value"]))
        gLogger.always(output)
    else:
        if not status:
            status = request.Status
        gLogger.always(
            "Request name='%s' ID=%s Status='%s'%s%s%s"
            % (
                request.RequestName,
                request.RequestID if hasattr(request, "RequestID") else "(not set yet)",
                request.Status,
                " ('%s' in DB)" % status if status != request.Status else "",
                (" Error='%s'" % request.Error) if request.Error and request.Error.strip() else "",
                (" Job=%s" % request.JobID) if request.JobID else "",
            )
        )
        gLogger.always(
            "Created %s, Updated %s%s"
            % (
                request.CreationTime,
                request.LastUpdate,
                (", NotBefore %s" % request.NotBefore) if request.NotBefore else "",
            )
        )
        if request.OwnerDN:
            gLogger.always("Owner: '%s', Group: %s" % (request.OwnerDN, request.OwnerGroup))
        for indexOperation in enumerate(request):
            op = indexOperation[1]
            if not terse or op.Status == "Failed":
                printOperation(indexOperation, verbose, onlyFailed=terse)
    if not terse:
        printFTSJobs(request)
def printOperation(indexOperation, verbose=True, onlyFailed=False):
    """Print one (index, Operation) pair, then its files.

    :param indexOperation: (position, Operation) as produced by enumerate()
    :param verbose: pretty-print ForwardDISET arguments instead of just the
        target service name
    :param onlyFailed: restrict the file listing to Failed files
    """
    global output
    i, op = indexOperation
    prStr = ""
    if op.SourceSE:
        prStr += "SourceSE: %s" % op.SourceSE
    if op.TargetSE:
        prStr += (" - " if prStr else "") + "TargetSE: %s" % op.TargetSE
    if prStr:
        prStr += " - "
    prStr += "Created %s, Updated %s" % (op.CreationTime, op.LastUpdate)
    if op.Type == "ForwardDISET" and op.Arguments:
        from DIRAC.Core.Utilities import DEncode
        decode, _length = DEncode.decode(op.Arguments)
        if verbose:
            output = ""
            prettyPrint(decode, offset=10)
            prStr += "\n       Arguments:\n" + output.strip("\n")
        else:
            prStr += "\n       Service: %s" % decode[0][0]
    gLogger.always(
        "  [%s] Operation Type='%s' ID=%s Order=%s Status='%s'%s%s"
        % (
            i,
            op.Type,
            op.OperationID if hasattr(op, "OperationID") else "(not set yet)",
            op.Order,
            op.Status,
            (" Error='%s'" % op.Error) if op.Error and op.Error.strip() else "",
            (" Catalog=%s" % op.Catalog) if op.Catalog else "",
        )
    )
    if prStr:
        gLogger.always("      %s" % prStr)
    for indexFile in enumerate(op):
        if not onlyFailed or indexFile[1].Status == "Failed":
            printFile(indexFile)
def printFile(indexFile):
    """Print one (index, File) pair on a single line.

    :param indexFile: (position, File) as produced by enumerate()
    """
    ind, fi = indexFile
    gLogger.always(
        "    [%02d] ID=%s LFN='%s' Status='%s'%s%s%s"
        % (
            ind + 1,
            fi.FileID if hasattr(fi, "FileID") else "(not set yet)",
            fi.LFN,
            fi.Status,
            # Show the checksum also when the error text mentions a checksum problem.
            (" Checksum='%s'" % fi.Checksum) if fi.Checksum or (fi.Error and "checksum" in fi.Error.lower()) else "",
            (" Error='%s'" % fi.Error) if fi.Error and fi.Error.strip() else "",
            (" Attempts=%d" % fi.Attempt) if fi.Attempt > 1 else "",
        )
    )
def recoverableRequest(request):
    """Decide whether a Failed request is worth resetting to Waiting.

    A request is NOT recoverable when its first genuinely-failed operation
    (one whose error is not in the accepted list) contains a Failed file
    whose error matches one of the permanently-fatal messages.
    """
    # File-level errors that make a request permanently unrecoverable.
    excludedErrors = (
        "File does not exist",
        "No such file or directory",
        "sourceSURL equals to targetSURL",
        "Max attempts limit reached",
        "Max attempts reached",
    )
    # Operation-level errors that are tolerated (the operation is skipped).
    operationErrorsOK = ("is banned for", "Failed to perform exists from any catalog")
    for operation in request:
        if operation.Status != "Failed":
            continue
        errorIsTolerated = bool(operation.Error) and any(
            okStr in operation.Error for okStr in operationErrorsOK
        )
        if errorIsTolerated:
            continue
        # First non-tolerated Failed operation decides the outcome.
        for opFile in operation:
            if opFile.Status == "Failed" and any(
                badStr in opFile.Error for badStr in excludedErrors
            ):
                return False
        return True
    return True
|
"""
Started on wed, oct 25th, 2017
@author: carlos.arana
Descripcion:
"""
import pandas as pd
import numpy as np
import sys
module_path = r'D:\PCCS\01_Dmine\Scripts'
if module_path not in sys.path:
sys.path.append(module_path)
from SUN.asignar_sun import asignar_sun
from VarInt.VarInt import VarInt
from SUN_integridad.SUN_integridad import SUN_integridad
from PCCS_variables.PCCS_variables import variables
from ParametroEstandar.ParametroEstandar import ParametroEstandar
from AsignarDimension.AsignarDimension import AsignarDimension
from DocumentarParametro.DocumentarParametro import DocumentarParametro
"""
Las librerias locales utilizadas renglones arriba se encuentran disponibles en las siguientes direcciones:
SCRIPT: | DISPONIBLE EN:
------ | ------------------------------------------------------------------------------------
asignar_sun | https://github.com/INECC-PCCS/01_Dmine/tree/master/Scripts/SUN
SUN_integridad | https://github.com/INECC-PCCS/01_Dmine/tree/master/Scripts/SUN_integridad
variables | https://github.com/INECC-PCCS/01_Dmine/tree/master/Scripts/PCCS_variables
ParametroEstandar | https://github.com/INECC-PCCS/01_Dmine/tree/master/Scripts/ParametroEstandar
AsignarDimension | https://github.com/INECC-PCCS/01_Dmine/tree/master/Scripts/AsignarDimension
DocumentarParametro | https://github.com/INECC-PCCS/01_Dmine/tree/master/Scripts/DocumentarParametro
VarInt | https://github.com/INECC-PCCS/01_Dmine/tree/master/Scripts/VarInt
"""
ClaveParametro = 'P0114'
DescParam = 'Macromedicion (%) Conocimiento real de agua entregada, calculado como cociente de el número de ' \
'macromedidores funcionando en captaciones dividido entre el número de captaciones, multiplicado por cien.' \
'Para la agregación a nivel Clave del SUN se calculó el promedio de porcentajes para los municipios que' \
'componen la ciudad'
UnidadesParam = 'Porcentaje'
NombreParametro = 'Macromedición'
TituloParametro = 'MACROMED' # Para nombrar la columna del parametro
PeriodoParam = '2015'
ContenidoHojaDatos = 'Datos de macromedición disponibles de 2002 a 2015'
ClaveDataset = 'Pigoo'
NomDataset = r'Programa de Indicadores de Gestión de Organismos Operadores'
DescDataset = r'Indicadores municipales generados por los Organismos Operadores de agua, recolectados por el ' \
r'Instituto Mexicano De Tecnologia del Agua y la Secretaría de Medio Ambiente y Recursos Naturales'
Notas = 'S/N'
DescVarIntegridad = 'La variable de integridad municipal para esta Dataset es binaria: \n' \
'1 = El municipio cuenta con informacion \n0 = El municipio no cuenta con información'
NomFuente = 'Programa de Indicadores de Gestión de Organismos Operadores'
UrlFuente = 'http://www.pigoo.gob.mx/index.php?option=com_content&view=article&id=674&Itemid=1677'
ActDatos = '2015'
DispTemp = '2002 a 2015'
PeriodoAct = 'Anual'
DesagrMax = 'Municipal'
DirFuente = r'D:\PCCS\01_Dmine\Datasets\{}'.format(ClaveDataset)
DSBase = '"{}.xlsx", disponible en ' \
'https://github.com/INECC-PCCS/01_Dmine/tree/master/Datasets/{}'.format(ClaveDataset, ClaveDataset)
ClaveDimension = ClaveParametro[1:3]
NomDimension = AsignarDimension(ClaveDimension)['nombre']
DirDimension = ClaveDimension + "_" + AsignarDimension(ClaveDimension)['directorio']
RepoMina = 'https://github.com/INECC-PCCS/01_Dmine/tree/master/{}/{}'.format(DirDimension, ClaveParametro)
DirDestino = r'D:\PCCS\01_Dmine\{}'.format(ClaveDimension+"_"+AsignarDimension(ClaveDimension)['directorio'])
dataset = pd.read_excel(DirFuente + r'\{}.xlsx'.format(ClaveDataset),
sheetname="Macromedición (%)", dtype={'CVE_MUN': str})
dataset.set_index('CVE_MUN', inplace=True)
del dataset['indicador'] # Quitar Columnas que no se utilizarán más
del dataset['ciudad'] # Quitar Columnas que no se utilizarán más
par_dataset = dataset['2015'].rename('Total_Parametro').to_frame()
par_dataset, variables_dataset = VarInt(par_dataset, dataset, tipo = 1)
variables_SUN = ['CVE_MUN', 'NOM_MUN', 'CVE_SUN', 'NOM_SUN', 'NOM_ENT']
DatosLimpios = asignar_sun(par_dataset, vars=variables_SUN)
OrdenColumnas = (variables_SUN + variables_dataset)
DatosLimpios = DatosLimpios[OrdenColumnas] # Reordenar las columnas
dataset.columns = [ClaveParametro+"_"+i for i in list(dataset)]
var_disponibles = list(dataset)
dataset['CVE_MUN'] = dataset.index
hoja_datos = asignar_sun(dataset)
hoja_datos = hoja_datos[(['CVE_MUN', 'CVE_SUN', 'NOM_SUN', 'TIPO_SUN'] + var_disponibles)].set_index('CVE_MUN')
integridad_parametro = SUN_integridad(DatosLimpios)
info_completa = sum(integridad_parametro['INTEGRIDAD']['INTEGRIDAD'] == 1) # Para generar grafico de integridad
info_sin_info = sum(integridad_parametro['INTEGRIDAD']['INTEGRIDAD'] == 0) # Para generar grafico de integridad
info_incomple = 135 - info_completa - info_sin_info # Para generar grafico de integridad
param_dataset = DatosLimpios.set_index('CVE_SUN')
param_dataset['CVE_SUN'] = param_dataset.index
param = param_dataset.groupby(level=0).agg('mean')['Total_Parametro'] # Agregacion por ciudad
intparam = param_dataset.groupby(level=0).agg('mean')['VAR_INTEGRIDAD'] # Integridad por ciudad
Tipo_Sun = integridad_parametro['EXISTENCIA']['TIPO_SUN']
Tipo_Sun = Tipo_Sun.groupby(Tipo_Sun.index).first()
std_nomsun = param_dataset['CVE_SUN'].map(str)+' - '+param_dataset['NOM_SUN'] # Nombres estandar CVE_SUN + NOM_SUN
std_nomsun.drop_duplicates(keep='first', inplace=True)
Parametro = pd.DataFrame()
Parametro['CIUDAD'] = std_nomsun
Parametro['TIPO_SUN'] = Tipo_Sun
Parametro[ClaveParametro] = param
Parametro['INTEGRIDAD'] = intparam
Parametro = Parametro.sort_index()
variables_locales = sorted(list(set(list(DatosLimpios) +
list(dataset) +
list(integridad_parametro['INTEGRIDAD']) +
list(integridad_parametro['EXISTENCIA']) +
list(Parametro))))
metavariables = variables(variables_locales)
d_parametro = {
'DESCRIPCION DEL PARAMETRO': np.nan,
'Clave': ClaveParametro,
'Nombre del Parametro': NombreParametro,
'Descripcion del Parametro': DescParam,
'Periodo' : PeriodoParam,
'Unidades': UnidadesParam
}
d_hojas = {
'METADATOS': 'Descripciones y notas relativas al Dataset',
'PARAMETRO': 'Dataset resultado de la minería, agregado por clave del Sistema Urbano Nacional, '
'para utilizarse en la construcción de Indicadores',
'DATOS': ContenidoHojaDatos,
'INTEGRIDAD': 'Revision de integridad de la información POR CLAVE DEL SUN. '
'Promedio de VAR_INTEGRIDAD de los municipios que componen una ciudad. '
'Si no se tiene información para el municipio, VAR_INTEGRIDAD es igual a cero',
'EXISTENCIA': 'Revision de integridad de la información POR MUNICIPIO.',
' ': np.nan,
'DESCRIPCION DE VARIABLES': np.nan
}
d_mineria = {
' ': np.nan,
'DESCRIPCION DEL PROCESO DE MINERIA:': np.nan,
'Nombre del Dataset': NomDataset,
'Descripcion del dataset': DescDataset,
'Disponibilidad Temporal': DispTemp,
'Periodo de actualizacion': PeriodoAct,
'Nivel de Desagregacion': DesagrMax,
'Notas': Notas,
'Fuente': NomFuente,
'URL_Fuente': UrlFuente,
'Dataset base': DSBase,
'Repositorio de mineria': RepoMina,
'VAR_INTEGRIDAD': DescVarIntegridad,
' ': np.nan,
'HOJAS INCLUIDAS EN EL LIBRO': np.nan
}
descripcion_parametro = pd.DataFrame.from_dict(d_parametro, orient='index').rename(columns={0: 'DESCRIPCION'})
descripcion_mineria = pd.DataFrame.from_dict(d_mineria, orient='index').rename(columns={0: 'DESCRIPCION'})
descripcion_hojas = pd.DataFrame.from_dict(d_hojas, orient='index').rename(columns={0: 'DESCRIPCION'})
MetaParametro = descripcion_parametro.append(descripcion_mineria).append(descripcion_hojas).append(metavariables)
DescParametro = {
'ClaveParametro': ClaveParametro,
'NombreParametro': NombreParametro,
'info_completa': info_completa,
'info_sin_info': info_sin_info,
'info_incomple': info_incomple,
'RutaSalida': DirDestino,
'Clave de Dimension': ClaveDimension,
'Nombre de Dimension': NomDimension,
'Titulo de Columna': TituloParametro,
'Actualizacion de datos': ActDatos
}
ParametroEstandar(DescParametro, MetaParametro, Parametro, DatosLimpios, integridad_parametro, hoja_datos)
DocumentarParametro(DescParametro, MetaParametro, Parametro)
|
"""
Multi-class Logistic Regression exploiting uncertainty in the input features.
"""
import os
import sys
import argparse
import logging
import numpy as np
import torch
from torch import nn
logger = logging.getLogger(__name__)
class MCLR(nn.Module):
    """Multi-class Logistic Regression.

    A single affine layer (W, b) trained with summed cross-entropy loss;
    L2 regularization is applied through AdamW's weight decay.
    """

    def __init__(
        self, dim, n_classes, lam_w=5e-02, trn_iters=500, lr=1e-02, cuda: bool = False
    ):
        """initialize the model

        Args:
        ----
        dim (int): Input feature dimension
        n_classes (int): Number of classes
        lam_w (float): L2 regularization weight for the parameters
        trn_iters (int): Training iterations
        lr (float): Learning rate
        cuda (bool): CUDA ?
        """
        super(MCLR, self).__init__()
        self.device = torch.device("cuda" if cuda else "cpu")
        # Small random init keeps the initial logits near zero.
        self.b = nn.Parameter(torch.randn(1, n_classes) * 0.001, requires_grad=True)
        self.W = nn.Parameter(torch.randn(dim, n_classes) * 0.001, requires_grad=True)
        # Stored as non-trainable parameters so they travel with state_dict.
        self.lam_w = nn.Parameter(torch.Tensor([lam_w]), requires_grad=False)
        self.dim = nn.Parameter(torch.LongTensor([dim]), requires_grad=False)
        self.trn_iters = trn_iters
        self.lr = lr
        self.xen_loss = nn.CrossEntropyLoss(reduction="sum")
        self.log_softmax = nn.LogSoftmax(dim=1)

    def compute_grads(self, compute: bool):
        """ compute grads, yes or no """
        self.W.requires_grad_(compute)
        self.b.requires_grad_(compute)

    def forward(self, X):
        """forward

        Args:
        ----
        X (torch.Tensor): [n_samples, feat_dim]

        Returns:
        --------
        torch.Tensor: logits [n_samples, n_classes]
        """
        logits = (X @ self.W) + self.b
        return logits

    def loss(self, logits, Y):
        """ Compute summed cross-entropy loss """
        xen = self.xen_loss(logits, Y)
        return xen

    def fit(self, X, Y):
        """ Fit or train the model """
        if isinstance(X, np.ndarray):
            X = torch.from_numpy(X)
            Y = torch.from_numpy(Y).to(dtype=torch.long)
        opt = torch.optim.AdamW([self.W, self.b], lr=self.lr, weight_decay=self.lam_w)
        train(self, opt, X, Y, self.trn_iters)

    def fit_and_validate(
        self, x_train, y_train, x_dev, y_dev, out_sfx="", val_iters=1, save=True
    ):
        """Train on (x_train, y_train), validating on (x_dev, y_dev) every
        `val_iters` iterations; checkpoints are saved under `out_sfx` when
        `save` is set.
        """
        # BUG FIX: the original body referenced undefined names X and Y
        # (NameError at runtime); convert the actual arguments instead.
        if isinstance(x_train, np.ndarray):
            x_train = torch.from_numpy(x_train)
            y_train = torch.from_numpy(y_train).to(dtype=torch.long)
        if isinstance(x_dev, np.ndarray):
            x_dev = torch.from_numpy(x_dev)
            y_dev = torch.from_numpy(y_dev).to(dtype=torch.long)
        opt = torch.optim.AdamW([self.W, self.b], lr=self.lr, weight_decay=self.lam_w)
        train_and_validate(
            self,
            x_train,
            y_train,
            x_dev,
            y_dev,
            opt,
            out_sfx,
            self.trn_iters,
            val_iters,
            save,
        )

    def predict_proba(self, X):
        """ Predict posterior probability of class labels """
        if isinstance(X, np.ndarray):
            X = torch.from_numpy(X).to(device=self.device)
        self.compute_grads(False)
        logits = self.forward(X)
        return torch.exp(self.log_softmax(logits))

    def predict(self, X):
        """ Predict the most likely class label per sample """
        if isinstance(X, np.ndarray):
            X = torch.from_numpy(X).to(device=self.device)
        return predict(self, X)
class MCLRU(nn.Module):
    """Multi-class Logistic Regression with uncertainty.

    Inputs are interpreted as [mean; log_std] Gaussian descriptions of the
    features; training marginalizes the cross-entropy over R Monte Carlo
    samples drawn from that distribution (when `use_uncert` is True).
    """

    def __init__(
        self,
        dim,
        n_classes,
        lam_w=5e-02,
        trn_iters=500,
        lr=1e-02,
        R=128,
        cuda=False,
        use_uncert=True,
    ):
        """initialize the model

        Args:
        ----
        dim (int): Input feature dimension
        n_classes (int): Number of classes
        lam_w (float): L2 regularization weight for the parameters
        trn_iters (int): Training iterations
        lr (float): Learning rate
        R (int): number of Monte Carlo samples
        cuda (bool): CUDA ?
        use_uncert (bool): Use uncertainties during training ?
        """
        super(MCLRU, self).__init__()
        self.device = torch.device("cuda" if cuda else "cpu")
        self.use_uncert = use_uncert
        # Small random init keeps the initial logits near zero.
        self.b = nn.Parameter(torch.randn(1, n_classes) * 0.001, requires_grad=True)
        self.W = nn.Parameter(torch.randn(dim, n_classes) * 0.001, requires_grad=True)
        # Stored as non-trainable parameters so they travel with state_dict.
        self.lam_w = nn.Parameter(torch.Tensor([lam_w]), requires_grad=False)
        self.R = nn.Parameter(torch.Tensor([R]), requires_grad=False).long()
        self.dim = nn.Parameter(torch.LongTensor([dim]), requires_grad=False)
        self.trn_iters = trn_iters
        self.lr = lr
        self.xen_loss = nn.CrossEntropyLoss(reduction="sum")
        self.log_softmax = nn.LogSoftmax(dim=2)

    def compute_grads(self, compute: bool):
        """ compute grads, yes or no """
        self.W.requires_grad_(compute)
        self.b.requires_grad_(compute)

    def sample(self, N):
        """ Generate [R, N, dim] samples from the std. Normal """
        # int() casts: self.R / self.dim are stored as 1-element tensors,
        # which torch size arguments do not reliably accept.
        return torch.randn(size=(int(self.R), N, int(self.dim))).to(device=self.device)

    def forward(self, X: torch.Tensor):
        """forward

        Args:
        ----
        X (torch.Tensor): [n_samples, mean;log_std]

        Returns:
        --------
        torch.Tensor: logits; [R, n_samples, n_classes] when using
        uncertainties, else [n_samples, n_classes]
        """
        d = int(self.dim)
        if self.use_uncert:
            eps = self.sample(X.shape[0])
            # reparameterization: mean + (eps * std)
            X_1 = X[:, :d] + (eps * torch.exp(X[:, d:]))
        else:
            X_1 = X[:, :d]
        logits = (X_1 @ self.W) + self.b
        return logits

    def loss(self, logits, Y):
        """ Compute cross-entropy loss, averaged over the R Monte Carlo
        samples when uncertainties are used """
        xen = torch.Tensor([0.0]).to(device=self.device)
        if self.use_uncert:
            for i in range(int(self.R)):
                xen += self.xen_loss(logits[i, :, :], Y)
            xen /= self.R.float().to(device=self.device)
        else:
            xen += self.xen_loss(logits, Y)
        return xen

    def fit(self, X, Y):
        """ Fit or train the model """
        if isinstance(X, np.ndarray):
            X = torch.from_numpy(X)
            Y = torch.from_numpy(Y).to(dtype=torch.long)
        opt = torch.optim.AdamW([self.W, self.b], lr=self.lr, weight_decay=self.lam_w)
        train(self, opt, X, Y, self.trn_iters)

    def fit_and_validate(
        self, x_train, y_train, x_dev, y_dev, out_sfx="", val_iters=1, save=True
    ):
        """Train on (x_train, y_train), validating on (x_dev, y_dev) every
        `val_iters` iterations; checkpoints are saved under `out_sfx` when
        `save` is set.
        """
        # BUG FIX: the original body referenced undefined names X and Y
        # (NameError at runtime); convert the actual arguments instead.
        if isinstance(x_train, np.ndarray):
            x_train = torch.from_numpy(x_train)
            y_train = torch.from_numpy(y_train).to(dtype=torch.long)
        if isinstance(x_dev, np.ndarray):
            x_dev = torch.from_numpy(x_dev)
            y_dev = torch.from_numpy(y_dev).to(dtype=torch.long)
        opt = torch.optim.AdamW([self.W, self.b], lr=self.lr, weight_decay=self.lam_w)
        train_and_validate(
            self,
            x_train,
            y_train,
            x_dev,
            y_dev,
            opt,
            out_sfx,
            self.trn_iters,
            val_iters,
            save,
        )

    def predict_proba(self, X):
        """ Predict posterior probability of class labels, averaged over
        the R Monte Carlo samples """
        if isinstance(X, np.ndarray):
            X = torch.from_numpy(X).to(device=self.device)
        self.compute_grads(False)
        self.use_uncert = True
        logits = self.forward(X)
        return torch.exp(self.log_softmax(logits)).mean(dim=0)

    def predict(self, X):
        """ Predict the most likely class label per sample """
        if isinstance(X, np.ndarray):
            X = torch.from_numpy(X).to(device=self.device)
        return predict(self, X)
def save_model(model, out_file):
    """Serialize the model's parameters (its state dict) to ``out_file``."""
    logger.info("Saving model %s", out_file)
    state = model.state_dict()
    torch.save(state, out_file)
def load_model(model, model_file):
    """Restore parameters from ``model_file`` into ``model`` and return it."""
    state = torch.load(model_file)
    model.load_state_dict(state)
    return model
def train(model, optim, X, Y, trn_iters):
    """Mini-batch training loop with greedy early stopping.

    Runs at most ``trn_iters`` epochs over (X, Y); stops as soon as the
    epoch loss fails to improve on the best seen so far.  The model's
    gradients are frozen before returning.
    """
    loader = torch.utils.data.DataLoader(
        torch.utils.data.TensorDataset(X, Y), batch_size=4096, pin_memory=True)
    best_loss = torch.Tensor([9999999]).to(model.device)
    for epoch in range(trn_iters):
        epoch_loss = torch.tensor(0.).to(device=model.device)
        for batch_x, batch_y in loader:
            batch_x = batch_x.to(device=model.device)
            batch_y = batch_y.to(device=model.device)
            optim.zero_grad()
            batch_loss = model.loss(model(batch_x), batch_y)
            batch_loss.backward()
            optim.step()
            epoch_loss += batch_loss.detach().item()
        logger.info(
            "Iter {:4d} / {:4d} Loss: {:f}".format(
                epoch + 1, trn_iters, epoch_loss.detach().cpu().item()
            )
        )
        if best_loss > epoch_loss.detach().item():
            best_loss = epoch_loss.detach().item()
        else:
            logger.info("Early stopping..")
            break
    model.compute_grads(False)
    return model
def train_and_validate(
    model,
    x_train,
    y_train,
    x_dev,
    y_dev,
    optim,
    out_sfx,
    trn_iters,
    val_iters=1,
    save=True,
):
    """Full-batch training with periodic validation.

    Every ``val_iters`` iterations train/dev accuracies are computed; when
    the dev accuracy improves and ``save`` is set, a checkpoint is written
    to ``out_sfx + "_<iter>.pt"``.

    Returns:
        tuple: (path to the best checkpoint, or "" if none was saved;
        [trn_iters, 2] array with train/dev accuracy (%) per iteration)
    """
    device = model.device
    x_train = x_train.to(device=device)
    y_train = y_train.to(device=device)
    x_dev = x_dev.to(device=device)
    y_dev = y_dev.to(device=device)
    # column 0: train accuracy, column 1: dev accuracy (percent)
    scores = np.zeros(shape=(trn_iters, 2), dtype=np.float32)
    best_dev_acc = 0.001
    best_model_file = ""
    for it in range(trn_iters):
        optim.zero_grad()
        xen = model.loss(model.forward(x_train), y_train)
        xen.backward()
        optim.step()
        if (it + 1) % val_iters == 0:
            # Freeze parameters while scoring, re-enable afterwards.
            model.compute_grads(False)
            scores[it, 0] = (
                np.mean(predict(model, x_train) == y_train.cpu().numpy()) * 100
            )
            scores[it, 1] = np.mean(predict(model, x_dev) == y_dev.cpu().numpy()) * 100
            if scores[it, 1] > best_dev_acc:
                best_dev_acc = scores[it, 1]
                if save:
                    best_model_file = out_sfx + f"_{it+1}.pt"
                    torch.save(model.state_dict(), best_model_file)
            model.compute_grads(True)
        logger.info(
            "Iter {:4d}/{:4d} Loss: {:.2f} "
            "Train acc: {:.2f} Dev acc: {:.2f}".format(
                it + 1, trn_iters, xen.detach().cpu().numpy().item(), *scores[it, :]
            )
        )
    return best_model_file, scores
def predict(model, X):
    """Return the most likely class label for each row of ``X``."""
    posterior = model.predict_proba(X)
    labels = torch.argmax(posterior, dim=1)
    return labels.cpu().numpy()
def main():
    """Smoke-test MCLRU on random data with a short training run."""
    args = parse_arguments()
    logging.basicConfig(
        format="%(asctime)s %(message)s",
        datefmt="%d-%m-%Y %H:%M:%S",
        filename=os.path.join("/tmp/", "run.log"),
        filemode="w",
        level=logging.INFO,
    )
    logging.getLogger().addHandler(logging.StreamHandler())
    # 5-dim features with uncertainty: inputs carry mean;log_std (2 * 5 cols).
    clf = MCLRU(5, 4, trn_iters=5, R=args.R, cuda=args.cuda)
    X = torch.randn(100, 5 * 2)
    Y = torch.randint(0, 4, size=(100,))
    X_dev = torch.randn(100, 5 * 2)
    Y_dev = torch.randint(0, 4, size=(100,))
    # BUG FIX: the original call passed an optimizer as `out_sfx` and an
    # unsupported `trn_iters=` keyword (TypeError).  fit_and_validate builds
    # its own optimizer and takes the iteration count from the model.
    clf.fit_and_validate(X, Y, X_dev, Y_dev, out_sfx="/tmp/mclru", save=False)
def parse_arguments():
    """Build the command-line parser and return the parsed arguments.

    Adds a derived ``cuda`` flag: True only when CUDA is available and
    ``--nocuda`` was not given.
    """
    ap = argparse.ArgumentParser(description=__doc__)
    ap.add_argument("-trn", default=10, type=int, help="training iters")
    ap.add_argument(
        "-lw", default=1e-4, type=float, help="L2 reg weight for the model params"
    )
    ap.add_argument(
        "-R", default=1, type=int, help="number of samples for Monte Carlo approx"
    )
    ap.add_argument("--nocuda", action="store_true", help="do not use cuda")
    opts = ap.parse_args()
    opts.cuda = bool(torch.cuda.is_available() and not opts.nocuda)
    return opts
# Script entry point: run the random-data smoke test.
if __name__ == "__main__":
    main()
|
import copy
from slist import slist
class ImageDataSet(object):
    """A flat collection of detected objects belonging to a single image."""

    def __init__(self):
        # Detected objects plus the (optional) image dimensions.
        self.objs = []
        self.height = 0
        self.width = 0

    def add_obj(self, obj, height=0, width=0):
        """Append an object; non-zero height/width update the image size."""
        self.objs.append(obj)
        if height != 0:
            self.height = height
        if width != 0:
            self.width = width

    def __len__(self):
        return len(self.objs)

    def __getitem__(self, i):
        return self.objs[i]

    def get_objs(self):
        """Return a shallow copy of the object list."""
        return copy.copy(self.objs)

    def get_intersecting_objs(self, other):
        """All objects whose bounding box overlaps that of ``other``."""
        # TODO: could be optimized with a sorted index over x coordinates.
        return [
            candidate
            for candidate in self.objs
            if candidate.bounding_box().overlap(other.bounding_box())
        ]

    def get_gprims(self):
        """Graphical primitives of every contained object."""
        return [candidate.get_gprim() for candidate in self.objs]

    def __str__(self):
        parts = [str(candidate) for candidate in self.objs]
        if parts:
            return "(ImageDataSet : " + " ".join(parts) + ")"
        return "(ImageDataSet :)"

    def __iter__(self):
        return iter(self.objs)
class DataSet(object):
    """Maps image identifiers to their ImageDataSet object collections."""

    def __init__(self, label = ""):
        self.images = {}
        self.label = label

    def __len__(self):
        return len(self.images)

    def __getitem__(self, i):
        # Unknown images yield an empty list rather than raising.
        if i in self.images:
            return self.images[i]
        return []

    def __iter__(self):
        return self.images.__iter__()

    def keys(self):
        return self.images.keys()

    def add_empty_image(self, image):
        """Register ``image`` with an empty object set (warn if it exists)."""
        if image not in self.images:
            self.images[image] = ImageDataSet()
        else:
            # BUG FIX: was a Python 2 `print` statement; the parenthesized
            # single-argument form behaves identically on Python 2 and 3.
            print("Warning : DataSet.add_empty_image : image already exists.")

    def add_obj(self, image, obj, height = 0, width = 0):
        """Add ``obj`` to ``image``, creating the image entry if needed."""
        if image not in self.images:
            self.add_empty_image(image)
        self.images[image].add_obj(obj, height, width)

    def get_gprims(self, key):
        if key in self.images:
            return self.images[key].get_gprims()
        else:
            return []

    def get_objs(self, key):
        if key in self.images:
            return self.images[key].get_objs()
        else:
            return []

    def get_nobjs(self):
        """Total number of objects across all images."""
        n = 0
        for i in self.images:
            n += len(self.images[i])
        return n

    def __str__(self):
        ret = "(DataSet %s : "%(self.label,)
        for img in self.images:
            ret += "(%s : %s) "%(img, str(self.images[img]))
        return ret[:-1] + ")"

    def confidence_max(self):
        """Highest object confidence over all images; 0 when empty."""
        # BUG FIX: the original tested `self.images == []`, which is never
        # true for a dict (an empty data set crashed), and used the
        # Python 2-only `self.images.keys()[0]`.
        if not self.images:
            return 0
        first_image = next(iter(self.images))
        m = self.images[first_image].get_objs()[0].confidence
        for im in self.images:
            for obj in self.images[im].get_objs():
                if obj.confidence > m:
                    m = obj.confidence
        return m

    def confidence_min(self):
        """Lowest object confidence over all images; 0 when empty."""
        # BUG FIX: same empty-dict / keys()[0] issues as confidence_max.
        if not self.images:
            return 0
        first_image = next(iter(self.images))
        m = self.images[first_image].get_objs()[0].confidence
        for im in self.images:
            for obj in self.images[im].get_objs():
                if obj.confidence < m:
                    m = obj.confidence
        return m
class DataSetFromMulti(object):
    """A view over the tail of a parent data set, starting at i_conf_min."""

    def __init__(self, parent, i_conf_min, label = ""):
        self.parent = parent
        self.i_conf_min = i_conf_min
        self.label = label

    def __len__(self):
        return len(self.parent) - self.i_conf_min

    def __iter__(self):
        """Iterate the parent's images, skipping the first i_conf_min."""
        # BUG FIX: was Python 2-only `xrange` / iterator `.next()`.
        ret = iter(self.parent.images)
        for _ in range(self.i_conf_min):
            next(ret)
        return ret

    def __getitem__(self, it):
        # BUG FIX: the original body referenced an undefined name `i`
        # (NameError on every integer access).
        if type(it) == int:
            return self.parent[it + self.i_conf_min]
        # Non-integer keys fall through to an implicit None, as before.
class DataSetMulti(DataSet):
    """A DataSet whose objects carry detection confidence scores."""

    def __init__(self, label=""):
        DataSet.__init__(self, label)
        self.confidences = []

    def add_obj(self, confidence, filename, obj, height=0, width=0):
        """Record the object and remember its confidence score."""
        DataSet.add_obj(self, filename, obj, height, width)
        self.confidences.append(float(confidence))

    def __str__(self):
        return "MultiDataSet " + DataSet.__str__(self)

    def __iter__(self):
        """Iterate over the recorded confidences (not the images)."""
        return iter(self.confidences)

    def __getitem__(self, conf):
        """Return a plain DataSet holding only the objects whose
        confidence is greater than or equal to ``conf``."""
        subset = DataSet()
        for image in DataSet.__iter__(self):
            for candidate in DataSet.__getitem__(self, image):
                if candidate.confidence >= conf:
                    subset.add_obj(image, candidate)
        return subset

    def __len__(self):
        return DataSet.__len__(self)

    def sort_confidences(self):
        self.confidences.sort()

    def get_confidences(self):
        return self.confidences

    def images_keys(self):
        return DataSet.keys(self)

    def n_confidences(self):
        return len(self.confidences)

    def confidence_max(self):
        """Largest recorded confidence, or 0 when none are recorded."""
        return max(self.confidences) if self.confidences else 0

    def confidence_min(self):
        """Smallest recorded confidence, or 0 when none are recorded."""
        return min(self.confidences) if self.confidences else 0
|
# Python 2 example script (in the style of "Learn Python the Hard Way"):
# echoes the command-line arguments plus some values from `sys`.
from sys import argv
from sys import path
from sys import maxint  # Python 2 only; removed in Python 3 (use sys.maxsize)
# Unpack exactly three arguments; raises ValueError if the count differs.
script, first, second, third = argv
print "The script is called: ", script
print "Your first variable is: ", first
print "Your second variable is: ", second
print "Your third variable is: ", third
print "The path is: ", path[0]
print "The max int is: ", maxint
|
import json
from lxml import etree
from datetime import datetime
from dateutil.relativedelta import relativedelta
from openerp import api, fields, models, _
from openerp.tools import float_is_zero
from openerp.tools.misc import formatLang
from openerp.exceptions import UserError, RedirectWarning, ValidationError
import openerp.addons.decimal_precision as dp
# Maps an invoice type to the journal type used to post it.
TYPE2JOURNAL = {
    'out_invoice': 'sale',
    'in_invoice': 'purchase',
    'out_refund': 'sale',
    'in_refund': 'purchase',
}

# Maps an invoice type to the type of the refund document that reverses it.
TYPE2REFUND = {
    'out_invoice': 'out_refund',        # Customer Invoice
    'in_invoice': 'in_refund',          # Vendor Bill
    'out_refund': 'out_invoice',        # Customer Refund
    'in_refund': 'in_invoice',          # Vendor Refund
}

# ORM housekeeping columns that must be excluded when copying record values.
MAGIC_COLUMNS = ('id', 'create_uid', 'create_date', 'write_uid', 'write_date')
# Customer/vendor invoice model; inherits mail.thread for chatter support.
class AccountInvoice(models.Model):
    _name = "account.invoice"
    _inherit = ['mail.thread']
    _description = "Invoice"
    _order = "date_invoice desc, number desc, id desc"
@api.one
@api.depends('invoice_line_ids.price_subtotal', 'tax_line_ids.amount', 'currency_id', 'company_id')
def _compute_amount(self):
self.amount_untaxed = sum(line.price_subtotal for line in self.invoice_line_ids)
self.amount_tax = sum(line.amount for line in self.tax_line_ids)
self.amount_total = self.amount_untaxed + self.amount_tax
amount_total_company_signed = self.amount_total
amount_untaxed_signed = self.amount_untaxed
if self.currency_id and self.currency_id != self.company_id.currency_id:
amount_total_company_signed = self.currency_id.compute(self.amount_total, self.company_id.currency_id)
amount_untaxed_signed = self.currency_id.compute(self.amount_untaxed, self.company_id.currency_id)
sign = self.type in ['in_refund', 'out_refund'] and -1 or 1
self.amount_total_company_signed = amount_total_company_signed * sign
self.amount_total_signed = self.amount_total * sign
self.amount_untaxed_signed = amount_untaxed_signed * sign
@api.model
def _default_journal(self):
if self._context.get('default_journal_id', False):
return self.env['account.journal'].browse(self._context.get('default_journal_id'))
inv_type = self._context.get('type', 'out_invoice')
inv_types = inv_type if isinstance(inv_type, list) else [inv_type]
company_id = self._context.get('company_id', self.env.user.company_id.id)
domain = [
('type', 'in', filter(None, map(TYPE2JOURNAL.get, inv_types))),
('company_id', '=', company_id),
]
return self.env['account.journal'].search(domain, limit=1)
@api.model
def _default_currency(self):
journal = self._default_journal()
return journal.currency_id or journal.company_id.currency_id
@api.model
def _get_reference_type(self):
return [('none', _('Free Reference'))]
@api.one
@api.depends(
'state', 'currency_id', 'invoice_line_ids.price_subtotal',
'move_id.line_ids.amount_residual',
'move_id.line_ids.currency_id')
def _compute_residual(self):
residual = 0.0
residual_company_signed = 0.0
sign = self.type in ['in_refund', 'out_refund'] and -1 or 1
for line in self.sudo().move_id.line_ids:
if line.account_id.internal_type in ('receivable', 'payable'):
residual_company_signed += line.amount_residual
if line.currency_id == self.currency_id:
residual += line.amount_residual_currency if line.currency_id else line.amount_residual
else:
from_currency = (line.currency_id and line.currency_id.with_context(date=line.date)) or line.company_id.currency_id.with_context(date=line.date)
residual += from_currency.compute(line.amount_residual, self.currency_id)
self.residual_company_signed = abs(residual_company_signed) * sign
self.residual_signed = abs(residual) * sign
self.residual = abs(residual)
digits_rounding_precision = self.currency_id.rounding
if float_is_zero(self.residual, digits_rounding_precision):
self.reconciled = True
else:
self.reconciled = False
@api.one
def _get_outstanding_info_JSON(self):
self.outstanding_credits_debits_widget = json.dumps(False)
if self.state == 'open':
domain = [('journal_id.type', 'in', ('bank', 'cash')), ('account_id', '=', self.account_id.id), ('partner_id', '=', self.env['res.partner']._find_accounting_partner(self.partner_id).id), ('reconciled', '=', False), ('amount_residual', '!=', 0.0)]
if self.type in ('out_invoice', 'in_refund'):
domain.extend([('credit', '>', 0), ('debit', '=', 0)])
type_payment = _('Outstanding credits')
else:
domain.extend([('credit', '=', 0), ('debit', '>', 0)])
type_payment = _('Outstanding debits')
info = {'title': '', 'outstanding': True, 'content': [], 'invoice_id': self.id}
lines = self.env['account.move.line'].search(domain)
if len(lines) != 0:
for line in lines:
# get the outstanding residual value in invoice currency
# get the outstanding residual value in its currency. We don't want to show it
# in the invoice currency since the exchange rate between the invoice date and
# the payment date might have changed.
if line.currency_id:
currency_id = line.currency_id
amount_to_show = abs(line.amount_residual_currency)
else:
currency_id = line.company_id.currency_id
amount_to_show = abs(line.amount_residual)
info['content'].append({
'journal_name': line.ref or line.move_id.name,
'amount': amount_to_show,
'currency': currency_id.symbol,
'id': line.id,
'position': currency_id.position,
'digits': [69, self.currency_id.decimal_places],
})
info['title'] = type_payment
self.outstanding_credits_debits_widget = json.dumps(info)
self.has_outstanding = True
    @api.one
    @api.depends('payment_move_line_ids.amount_residual')
    def _get_payment_info_JSON(self):
        """Build the JSON payload for the 'payments' widget: one entry per
        payment move line reconciled against this invoice, with the amount
        shown in the payment's own currency."""
        self.payments_widget = json.dumps(False)
        if self.payment_move_line_ids:
            info = {'title': _('Less Payment'), 'outstanding': False, 'content': []}
            for payment in self.payment_move_line_ids:
                #we don't take into account the movement created due to a change difference
                if payment.currency_id and payment.move_id.rate_diff_partial_rec_id:
                    continue
                # Sum only the partial reconciliations matching this invoice's
                # own move lines (credit side for customer documents, debit
                # side for vendor documents).
                if self.type in ('out_invoice', 'in_refund'):
                    amount = sum([p.amount for p in payment.matched_debit_ids if p.debit_move_id in self.move_id.line_ids])
                    amount_currency = sum([p.amount_currency for p in payment.matched_debit_ids if p.debit_move_id in self.move_id.line_ids])
                elif self.type in ('in_invoice', 'out_refund'):
                    amount = sum([p.amount for p in payment.matched_credit_ids if p.credit_move_id in self.move_id.line_ids])
                    amount_currency = sum([p.amount_currency for p in payment.matched_credit_ids if p.credit_move_id in self.move_id.line_ids])
                # Get the payment value in its currency. We don't want to show it in the invoice
                # currency since the exchange rate between the invoice date and the payment date
                # might have changed.
                if payment.currency_id and amount_currency != 0:
                    currency_id = payment.currency_id
                    amount_to_show = -amount_currency
                else:
                    currency_id = payment.company_id.currency_id
                    amount_to_show = -amount
                info['content'].append({
                    'name': payment.name,
                    'journal_name': payment.journal_id.name,
                    'amount': amount_to_show,
                    'currency': currency_id.symbol,
                    'digits': [69, currency_id.decimal_places],
                    'position': currency_id.position,
                    'date': payment.date,
                    'payment_id': payment.id,
                    'move_id': payment.move_id.id,
                    'ref': payment.move_id.ref,
                })
            self.payments_widget = json.dumps(info)
@api.one
@api.depends('move_id.line_ids.amount_residual')
def _compute_payments(self):
payment_lines = []
for line in self.move_id.line_ids:
payment_lines.extend([rp.credit_move_id.id for rp in line.matched_credit_ids])
payment_lines.extend([rp.debit_move_id.id for rp in line.matched_debit_ids])
self.payment_move_line_ids = self.env['account.move.line'].browse(list(set(payment_lines)))
name = fields.Char(string='Reference/Description', index=True,
readonly=True, states={'draft': [('readonly', False)]}, copy=False, help='The name that will be used on account move lines')
origin = fields.Char(string='Source Document',
help="Reference of the document that produced this invoice.",
readonly=True, states={'draft': [('readonly', False)]})
type = fields.Selection([
('out_invoice','Customer Invoice'),
('in_invoice','Vendor Bill'),
('out_refund','Customer Refund'),
('in_refund','Vendor Refund'),
], readonly=True, index=True, change_default=True,
default=lambda self: self._context.get('type', 'out_invoice'),
track_visibility='always')
number = fields.Char(related='move_id.name', store=True, readonly=True, copy=False)
move_name = fields.Char(string='Journal Entry', readonly=True,
default=False, copy=False,
help="Technical field holding the number given to the invoice, automatically set when the invoice is validated then stored to set the same number again if the invoice is cancelled, set to draft and re-validated.")
reference = fields.Char(string='Vendor Reference',
help="The partner reference of this invoice.", readonly=True, states={'draft': [('readonly', False)]})
reference_type = fields.Selection('_get_reference_type', string='Payment Reference',
required=True, readonly=True, states={'draft': [('readonly', False)]},
default='none')
comment = fields.Text('Additional Information', readonly=True, states={'draft': [('readonly', False)]})
state = fields.Selection([
('draft','Draft'),
('proforma', 'Pro-forma'),
('proforma2', 'Pro-forma'),
('open', 'Open'),
('paid', 'Paid'),
('cancel', 'Cancelled'),
], string='Status', index=True, readonly=True, default='draft',
track_visibility='onchange', copy=False,
help=" * The 'Draft' status is used when a user is encoding a new and unconfirmed Invoice.\n"
" * The 'Pro-forma' status is used the invoice does not have an invoice number.\n"
" * The 'Open' status is used when user create invoice, an invoice number is generated. Its in open status till user does not pay invoice.\n"
" * The 'Paid' status is set automatically when the invoice is paid. Its related journal entries may or may not be reconciled.\n"
" * The 'Cancelled' status is used when user cancel invoice.")
sent = fields.Boolean(readonly=True, default=False, copy=False,
help="It indicates that the invoice has been sent.")
date_invoice = fields.Date(string='Invoice Date',
readonly=True, states={'draft': [('readonly', False)]}, index=True,
help="Keep empty to use the current date", copy=False)
date_due = fields.Date(string='Due Date',
readonly=True, states={'draft': [('readonly', False)]}, index=True, copy=False,
help="If you use payment terms, the due date will be computed automatically at the generation "
"of accounting entries. The payment term may compute several due dates, for example 50% "
"now and 50% in one month, but if you want to force a due date, make sure that the payment "
"term is not set on the invoice. If you keep the payment term and the due date empty, it "
"means direct payment.")
partner_id = fields.Many2one('res.partner', string='Partner', change_default=True,
required=True, readonly=True, states={'draft': [('readonly', False)]},
track_visibility='always')
payment_term_id = fields.Many2one('account.payment.term', string='Payment Term', oldname='payment_term',
readonly=True, states={'draft': [('readonly', False)]},
help="If you use payment terms, the due date will be computed automatically at the generation "
"of accounting entries. If you keep the payment term and the due date empty, it means direct payment. "
"The payment term may compute several due dates, for example 50% now, 50% in one month.")
date = fields.Date(string='Accounting Date',
copy=False,
help="Keep empty to use the invoice date.",
readonly=True, states={'draft': [('readonly', False)]})
account_id = fields.Many2one('account.account', string='Account',
required=True, readonly=True, states={'draft': [('readonly', False)]},
domain=[('deprecated', '=', False)], help="The partner account used for this invoice.")
invoice_line_ids = fields.One2many('account.invoice.line', 'invoice_id', string='Invoice Lines', oldname='invoice_line',
readonly=True, states={'draft': [('readonly', False)]}, copy=True)
tax_line_ids = fields.One2many('account.invoice.tax', 'invoice_id', string='Tax Lines', oldname='tax_line',
readonly=True, states={'draft': [('readonly', False)]}, copy=True)
move_id = fields.Many2one('account.move', string='Journal Entry',
readonly=True, index=True, ondelete='restrict', copy=False,
help="Link to the automatically generated Journal Items.")
amount_untaxed = fields.Monetary(string='Untaxed Amount',
store=True, readonly=True, compute='_compute_amount', track_visibility='always')
amount_untaxed_signed = fields.Monetary(string='Untaxed Amount', currency_field='company_currency_id',
store=True, readonly=True, compute='_compute_amount')
amount_tax = fields.Monetary(string='Tax',
store=True, readonly=True, compute='_compute_amount')
amount_total = fields.Monetary(string='Total',
store=True, readonly=True, compute='_compute_amount')
amount_total_signed = fields.Monetary(string='Total', currency_field='currency_id',
store=True, readonly=True, compute='_compute_amount',
help="Total amount in the currency of the invoice, negative for credit notes.")
amount_total_company_signed = fields.Monetary(string='Total', currency_field='company_currency_id',
store=True, readonly=True, compute='_compute_amount',
help="Total amount in the currency of the company, negative for credit notes.")
currency_id = fields.Many2one('res.currency', string='Currency',
required=True, readonly=True, states={'draft': [('readonly', False)]},
default=_default_currency, track_visibility='always')
company_currency_id = fields.Many2one('res.currency', related='company_id.currency_id', readonly=True)
journal_id = fields.Many2one('account.journal', string='Journal',
required=True, readonly=True, states={'draft': [('readonly', False)]},
default=_default_journal,
domain="[('type', 'in', {'out_invoice': ['sale'], 'out_refund': ['sale'], 'in_refund': ['purchase'], 'in_invoice': ['purchase']}.get(type, [])), ('company_id', '=', company_id)]")
company_id = fields.Many2one('res.company', string='Company', change_default=True,
required=True, readonly=True, states={'draft': [('readonly', False)]},
default=lambda self: self.env['res.company']._company_default_get('account.invoice'))
reconciled = fields.Boolean(string='Paid/Reconciled', store=True, readonly=True, compute='_compute_residual',
help="It indicates that the invoice has been paid and the journal entry of the invoice has been reconciled with one or several journal entries of payment.")
partner_bank_id = fields.Many2one('res.partner.bank', string='Bank Account',
help='Bank Account Number to which the invoice will be paid. A Company bank account if this is a Customer Invoice or Vendor Refund, otherwise a Partner bank account number.',
readonly=True, states={'draft': [('readonly', False)]})
residual = fields.Monetary(string='Amount Due',
compute='_compute_residual', store=True, help="Remaining amount due.")
residual_signed = fields.Monetary(string='Amount Due', currency_field='currency_id',
compute='_compute_residual', store=True, help="Remaining amount due in the currency of the invoice.")
residual_company_signed = fields.Monetary(string='Amount Due', currency_field='company_currency_id',
compute='_compute_residual', store=True, help="Remaining amount due in the currency of the company.")
payment_ids = fields.Many2many('account.payment', 'account_invoice_payment_rel', 'invoice_id', 'payment_id', string="Payments", copy=False, readonly=True)
payment_move_line_ids = fields.Many2many('account.move.line', string='Payments', compute='_compute_payments', store=True)
user_id = fields.Many2one('res.users', string='Salesperson', track_visibility='onchange',
readonly=True, states={'draft': [('readonly', False)]},
default=lambda self: self.env.user)
fiscal_position_id = fields.Many2one('account.fiscal.position', string='Fiscal Position', oldname='fiscal_position',
readonly=True, states={'draft': [('readonly', False)]})
commercial_partner_id = fields.Many2one('res.partner', string='Commercial Entity',
related='partner_id.commercial_partner_id', store=True, readonly=True,
help="The commercial entity that will be used on Journal Entries for this invoice")
outstanding_credits_debits_widget = fields.Text(compute='_get_outstanding_info_JSON')
payments_widget = fields.Text(compute='_get_payment_info_JSON')
has_outstanding = fields.Boolean(compute='_get_outstanding_info_JSON')
_sql_constraints = [
('number_uniq', 'unique(number, company_id, journal_id, type)', 'Invoice Number must be unique per Company!'),
]
@api.model
def create(self, vals):
if not vals.get('account_id',False):
raise UserError(_('Configuration error!\nCould not find any account to create the invoice, are you sure you have a chart of account installed?'))
return super(AccountInvoice, self.with_context(mail_create_nolog=True)).create(vals)
    @api.model
    def fields_view_get(self, view_id=None, view_type=False, toolbar=False, submenu=False):
        """When opened from a partner record, pick a customer- or
        supplier-oriented invoice view depending on the partner's flags."""
        def get_view_id(xid, name):
            # Resolve an XML id under the 'account.' namespace, falling back
            # to a lookup by view name.
            # NOTE(review): the try branch returns a view *record* while the
            # fallback returns a plain integer id; callers below apply `.id`
            # to the result in some paths but not others — confirm intended.
            try:
                return self.env.ref('account.' + xid)
            except ValueError:
                view = self.env['ir.ui.view'].search([('name', '=', name)], limit=1)
                if not view:
                    return False
                return view.id

        context = self._context
        if context.get('active_model') == 'res.partner' and context.get('active_ids'):
            partner = self.env['res.partner'].browse(context['active_ids'])[0]
            if not view_type:
                view_id = get_view_id('invoice_tree', 'account.invoice.tree')
                view_type = 'tree'
            elif view_type == 'form':
                # Supplier-only partners get the vendor bill form, customer-only
                # partners the customer invoice form.
                if partner.supplier and not partner.customer:
                    view_id = get_view_id('invoice_supplier_form', 'account.invoice.supplier.form').id
                elif partner.customer and not partner.supplier:
                    view_id = get_view_id('invoice_form', 'account.invoice.form').id
        return super(AccountInvoice, self).fields_view_get(view_id=view_id, view_type=view_type, toolbar=toolbar, submenu=submenu)
    @api.multi
    def invoice_print(self):
        """ Print the invoice and mark it as sent, so that we can see more
        easily the next step of the workflow.

        :return: the report action for 'account.report_invoice'
        """
        self.ensure_one()
        # Flag the invoice as sent before returning the report action.
        self.sent = True
        return self.env['report'].get_action(self, 'account.report_invoice')
@api.multi
def action_invoice_sent(self):
""" Open a window to compose an email, with the edi invoice template
message loaded by default
"""
self.ensure_one()
template = self.env.ref('account.email_template_edi_invoice', False)
compose_form = self.env.ref('mail.email_compose_message_wizard_form', False)
ctx = dict(
default_model='account.invoice',
default_res_id=self.id,
default_use_template=bool(template),
default_template_id=template.id,
default_composition_mode='comment',
mark_invoice_as_sent=True,
)
return {
'name': _('Compose Email'),
'type': 'ir.actions.act_window',
'view_type': 'form',
'view_mode': 'form',
'res_model': 'mail.compose.message',
'views': [(compose_form.id, 'form')],
'view_id': compose_form.id,
'target': 'new',
'context': ctx,
}
    @api.multi
    def compute_taxes(self):
        """Recompute the tax lines of freshly created invoices.

        Used by other modules on invoices created from code, where the form
        onchanges were never applied.  Manual tax lines are preserved; all
        other tax lines are deleted and regenerated from the invoice lines.

        :return: result of a dummy write() used to trigger recomputations
        """
        account_invoice_tax = self.env['account.invoice.tax']
        ctx = dict(self._context)
        for invoice in self:
            # Delete non-manual tax lines (raw SQL bypasses the ORM, hence the
            # cache invalidation just after)
            self._cr.execute("DELETE FROM account_invoice_tax WHERE invoice_id=%s AND manual is False", (invoice.id,))
            self.invalidate_cache()
            # Generate one tax line per tax, however many invoice lines it's applied to
            tax_grouped = invoice.get_taxes_values()
            # Create new tax lines
            for tax in tax_grouped.values():
                account_invoice_tax.create(tax)
        # dummy write on self to trigger recomputations
        return self.with_context(ctx).write({'invoice_line_ids': []})
    @api.multi
    def confirm_paid(self):
        """Force the invoices into the 'paid' state.

        :return: True (result of write)
        """
        return self.write({'state': 'paid'})
@api.multi
def unlink(self):
for invoice in self:
if invoice.state not in ('draft', 'cancel'):
raise UserError(_('You cannot delete an invoice which is not draft or cancelled. You should refund it instead.'))
elif invoice.move_name:
raise UserError(_('You cannot delete an invoice after it has been validated (and received a number). You can set it back to "Draft" state and modify its content, then re-confirm it.'))
return super(AccountInvoice, self).unlink()
@api.onchange('invoice_line_ids')
def _onchange_invoice_line_ids(self):
taxes_grouped = self.get_taxes_values()
tax_lines = self.tax_line_ids.browse([])
for tax in taxes_grouped.values():
tax_lines += tax_lines.new(tax)
self.tax_line_ids = tax_lines
return
    @api.onchange('partner_id', 'company_id')
    def _onchange_partner_id(self):
        """Update account, payment term, fiscal position and (for vendor bills)
        bank account from the selected partner and company.

        When the partner's property accounts belong to another company, the
        company-specific ir.property records are looked up instead.
        """
        account_id = False
        payment_term_id = False
        fiscal_position = False
        bank_id = False
        p = self.partner_id
        company_id = self.company_id.id
        type = self.type
        if p:
            partner_id = p.id
            rec_account = p.property_account_receivable_id
            pay_account = p.property_account_payable_id
            if company_id:
                # Partner's property accounts are set for a different company:
                # fetch the properties scoped to the invoice's company instead
                # (partner-specific first, then the company default).
                if p.property_account_receivable_id.company_id and \
                        p.property_account_receivable_id.company_id.id != company_id and \
                        p.property_account_payable_id.company_id and \
                        p.property_account_payable_id.company_id.id != company_id:
                    prop = self.env['ir.property']
                    rec_dom = [('name', '=', 'property_account_receivable_id'), ('company_id', '=', company_id)]
                    pay_dom = [('name', '=', 'property_account_payable_id'), ('company_id', '=', company_id)]
                    res_dom = [('res_id', '=', 'res.partner,%s' % partner_id)]
                    rec_prop = prop.search(rec_dom + res_dom) or prop.search(rec_dom)
                    pay_prop = prop.search(pay_dom + res_dom) or prop.search(pay_dom)
                    rec_account = rec_prop.get_by_record(rec_prop)
                    pay_account = pay_prop.get_by_record(pay_prop)
                    if not rec_account and not pay_account:
                        action = self.env.ref('account.action_account_config')
                        msg = _('Cannot find a chart of accounts for this company, You should configure it. \nPlease go to Account Configuration.')
                        raise RedirectWarning(msg, action.id, _('Go to the configuration panel'))
            # Customer documents use the receivable account and the customer
            # payment term; vendor documents the payable side.
            if type in ('out_invoice', 'out_refund'):
                account_id = rec_account.id
                payment_term_id = p.property_payment_term_id.id
            else:
                account_id = pay_account.id
                payment_term_id = p.property_supplier_payment_term_id.id
            fiscal_position = p.property_account_position_id.id
            bank_id = p.bank_ids and p.bank_ids.ids[0] or False
        self.account_id = account_id
        self.payment_term_id = payment_term_id
        self.fiscal_position_id = fiscal_position
        # The partner bank account is only relevant on vendor documents.
        if type in ('in_invoice', 'in_refund'):
            self.partner_bank_id = bank_id
@api.onchange('journal_id')
def _onchange_journal_id(self):
if self.journal_id:
self.currency_id = self.journal_id.currency_id.id or self.journal_id.company_id.currency_id.id
@api.onchange('payment_term_id', 'date_invoice')
def _onchange_payment_term_date_invoice(self):
date_invoice = self.date_invoice
if not date_invoice:
date_invoice = fields.Date.context_today(self)
if not self.payment_term_id:
# When no payment term defined
self.date_due = self.date_due or self.date_invoice
else:
pterm = self.payment_term_id
pterm_list = pterm.with_context(currency_id=self.currency_id.id).compute(value=1, date_ref=date_invoice)[0]
self.date_due = max(line[0] for line in pterm_list)
    @api.multi
    def action_cancel_draft(self):
        """Reset cancelled invoices back to draft and restart their workflow.

        :return: True
        """
        # go from canceled state to draft state
        self.write({'state': 'draft'})
        # Recreate the workflow instance so the invoice can be validated again.
        self.delete_workflow()
        self.create_workflow()
        return True
    @api.multi
    def get_taxes_values(self):
        """Compute the tax amounts of the invoice, grouped by tax.

        :return: dict mapping tax id to a dict of account.invoice.tax values;
                 amounts of the same tax across invoice lines are summed.
        """
        tax_grouped = {}
        for line in self.invoice_line_ids:
            # Unit price net of the line discount.
            price_unit = line.price_unit * (1 - (line.discount or 0.0) / 100.0)
            taxes = line.invoice_line_tax_ids.compute_all(price_unit, self.currency_id, line.quantity, line.product_id, self.partner_id)['taxes']
            for tax in taxes:
                val = {
                    'invoice_id': self.id,
                    'name': tax['name'],
                    'tax_id': tax['id'],
                    'amount': tax['amount'],
                    'manual': False,
                    'sequence': tax['sequence'],
                    'account_analytic_id': tax['analytic'] and line.account_analytic_id.id or False,
                    # Invoices use the tax's account, refunds its refund account;
                    # fall back to the invoice line's account either way.
                    'account_id': self.type in ('out_invoice', 'in_invoice') and (tax['account_id'] or line.account_id.id) or (tax['refund_account_id'] or line.account_id.id),
                }
                # If the taxes generate moves on the same financial account as the invoice line,
                # propagate the analytic account from the invoice line to the tax line.
                # This is necessary in situations were (part of) the taxes cannot be reclaimed,
                # to ensure the tax move is allocated to the proper analytic account.
                if not val.get('account_analytic_id') and line.account_analytic_id and val['account_id'] == line.account_id.id:
                    val['account_analytic_id'] = line.account_analytic_id.id
                key = tax['id']
                if key not in tax_grouped:
                    tax_grouped[key] = val
                else:
                    tax_grouped[key]['amount'] += val['amount']
        return tax_grouped
@api.multi
def register_payment(self, payment_line, writeoff_acc_id=False, writeoff_journal_id=False):
""" Reconcile payable/receivable lines from the invoice with payment_line """
line_to_reconcile = self.env['account.move.line']
for inv in self:
line_to_reconcile += inv.move_id.line_ids.filtered(lambda r: not r.reconciled and r.account_id.internal_type in ('payable', 'receivable'))
return (line_to_reconcile + payment_line).reconcile(writeoff_acc_id, writeoff_journal_id)
    @api.v7
    def assign_outstanding_credit(self, cr, uid, id, credit_aml_id, context=None):
        """Old-API entry point: reconcile the given credit move line against
        the invoice ``id``, linking its payment to the invoice first.

        :param credit_aml_id: id of the account.move.line holding the credit
        :return: result of register_payment() on the invoice
        """
        credit_aml = self.pool.get('account.move.line').browse(cr, uid, credit_aml_id, context=context)
        if credit_aml.payment_id:
            # (4, id, None) adds this invoice to the payment's invoice_ids
            credit_aml.payment_id.write({'invoice_ids': [(4, id, None)]})
        return self.browse(cr, uid, id, context=context).register_payment(credit_aml)
@api.multi
def action_date_assign(self):
for inv in self:
# Here the onchange will automatically write to the database
inv._onchange_payment_term_date_invoice()
return True
@api.multi
def finalize_invoice_move_lines(self, move_lines):
""" finalize_invoice_move_lines(move_lines) -> move_lines
Hook method to be overridden in additional modules to verify and
possibly alter the move lines to be created by an invoice, for
special cases.
:param move_lines: list of dictionaries with the account.move.lines (as for create())
:return: the (possibly updated) final move_lines to create for this invoice
"""
return move_lines
    @api.multi
    def compute_invoice_totals(self, company_currency, invoice_move_lines):
        """Convert the move line amounts to company currency and total them.

        Each line ends up with 'price' in company currency; when the invoice
        currency differs, 'amount_currency' keeps the invoice-currency amount.
        Totals are signed by invoice type (positive for out_invoice/in_refund).

        :return: (total, total_currency, invoice_move_lines)
        """
        total = 0
        total_currency = 0
        for line in invoice_move_lines:
            if self.currency_id != company_currency:
                # Convert at the invoice date (today when unset).
                currency = self.currency_id.with_context(date=self.date_invoice or fields.Date.context_today(self))
                line['currency_id'] = currency.id
                line['amount_currency'] = currency.round(line['price'])
                line['price'] = currency.compute(line['price'], company_currency)
            else:
                line['currency_id'] = False
                line['amount_currency'] = False
                line['price'] = self.currency_id.round(line['price'])
            if self.type in ('out_invoice', 'in_refund'):
                total += line['price']
                total_currency += line['amount_currency'] or line['price']
                # Flip the sign on the line itself for these invoice types.
                line['price'] = - line['price']
            else:
                total -= line['price']
                total_currency -= line['amount_currency'] or line['price']
        return total, total_currency, invoice_move_lines
    @api.model
    def invoice_line_move_line_get(self):
        """Build one move-line dict per invoice line.

        :return: list of dicts for the invoice lines, including the applied
                 taxes (and their non-'none' children) as (4, id) commands.
        """
        res = []
        for line in self.invoice_line_ids:
            tax_ids = []
            for tax in line.invoice_line_tax_ids:
                tax_ids.append((4, tax.id, None))
                # Group taxes: also link the children, except pure grouping ones.
                for child in tax.children_tax_ids:
                    if child.type_tax_use != 'none':
                        tax_ids.append((4, child.id, None))
            move_line_dict = {
                'invl_id': line.id,
                'type': 'src',
                # Move line names are capped at 64 chars, first line only.
                'name': line.name.split('\n')[0][:64],
                'price_unit': line.price_unit,
                'quantity': line.quantity,
                'price': line.price_subtotal,
                'account_id': line.account_id.id,
                'product_id': line.product_id.id,
                'uom_id': line.uom_id.id,
                'account_analytic_id': line.account_analytic_id.id,
                'tax_ids': tax_ids,
                'invoice_id': self.id,
            }
            # Attach an analytic line when the invoice line has an analytic account.
            if line['account_analytic_id']:
                move_line_dict['analytic_line_ids'] = [(0, 0, line._get_analytic_line())]
            res.append(move_line_dict)
        return res
@api.model
def tax_line_move_line_get(self):
res = []
for tax_line in self.tax_line_ids:
res.append({
'tax_line_id': tax_line.tax_id.id,
'type': 'tax',
'name': tax_line.name,
'price_unit': tax_line.amount,
'quantity': 1,
'price': tax_line.amount,
'account_id': tax_line.account_id.id,
'account_analytic_id': tax_line.account_analytic_id.id,
})
return res
def inv_line_characteristic_hashcode(self, invoice_line):
"""Overridable hashcode generation for invoice lines. Lines having the same hashcode
will be grouped together if the journal has the 'group line' option. Of course a module
can add fields to invoice lines that would need to be tested too before merging lines
or not."""
return "%s-%s-%s-%s-%s" % (
invoice_line['account_id'],
invoice_line.get('tax_line_id', 'False'),
invoice_line.get('product_id', 'False'),
invoice_line.get('analytic_account_id', 'False'),
invoice_line.get('date_maturity', 'False'),
)
    def group_lines(self, iml, line):
        """Merge account move lines (and hence analytic lines) if invoice line hashcodes are equals

        :param iml: original move line dicts (unused here; kept for overrides)
        :param line: list of (0, 0, vals) creation commands
        :return: the (possibly merged) list of creation commands
        """
        if self.journal_id.group_invoice_lines:
            line2 = {}
            for x, y, l in line:
                tmp = self.inv_line_characteristic_hashcode(l)
                if tmp in line2:
                    # Net the debit/credit of the merged lines: a positive net
                    # becomes the debit, a negative net the credit.
                    am = line2[tmp]['debit'] - line2[tmp]['credit'] + (l['debit'] - l['credit'])
                    line2[tmp]['debit'] = (am > 0) and am or 0.0
                    line2[tmp]['credit'] = (am < 0) and -am or 0.0
                    line2[tmp]['analytic_line_ids'] += l['analytic_line_ids']
                else:
                    line2[tmp] = l
            line = []
            for key, val in line2.items():
                line.append((0, 0, val))
        return line
    @api.multi
    def action_move_create(self):
        """ Creates invoice related analytics and financial move lines.

        For each invoice without a journal entry yet: build the move lines
        from the invoice lines, tax lines and one or more 'dest' counterpart
        lines (split per payment term installment when a term is set), create
        and post the account.move, then link it back to the invoice.

        :return: True
        :raises UserError: when the journal has no sequence or the invoice
                           has no lines
        """
        account_move = self.env['account.move']
        for inv in self:
            if not inv.journal_id.sequence_id:
                raise UserError(_('Please define sequence on the journal related to this invoice.'))
            if not inv.invoice_line_ids:
                raise UserError(_('Please create some invoice lines.'))
            # Already posted: nothing to do for this invoice.
            if inv.move_id:
                continue
            ctx = dict(self._context, lang=inv.partner_id.lang)
            if not inv.date_invoice:
                inv.with_context(ctx).write({'date_invoice': fields.Date.context_today(self)})
            date_invoice = inv.date_invoice
            company_currency = inv.company_id.currency_id
            # create move lines (one per invoice line + eventual taxes and analytic lines)
            iml = inv.invoice_line_move_line_get()
            iml += inv.tax_line_move_line_get()
            diff_currency = inv.currency_id != company_currency
            # create one move line for the total and possibly adjust the other lines amount
            total, total_currency, iml = inv.with_context(ctx).compute_invoice_totals(company_currency, iml)
            name = inv.name or '/'
            if inv.payment_term_id:
                # One counterpart line per payment-term installment.
                totlines = inv.with_context(ctx).payment_term_id.with_context(currency_id=inv.currency_id.id).compute(total, date_invoice)[0]
                res_amount_currency = total_currency
                ctx['date'] = date_invoice
                for i, t in enumerate(totlines):
                    if inv.currency_id != company_currency:
                        amount_currency = company_currency.with_context(ctx).compute(t[1], inv.currency_id)
                    else:
                        amount_currency = False
                    # last line: add the diff
                    res_amount_currency -= amount_currency or 0
                    if i + 1 == len(totlines):
                        amount_currency += res_amount_currency
                    iml.append({
                        'type': 'dest',
                        'name': name,
                        'price': t[1],
                        'account_id': inv.account_id.id,
                        'date_maturity': t[0],
                        'amount_currency': diff_currency and amount_currency,
                        'currency_id': diff_currency and inv.currency_id.id,
                        'invoice_id': inv.id
                    })
            else:
                # No payment term: a single counterpart line for the total.
                iml.append({
                    'type': 'dest',
                    'name': name,
                    'price': total,
                    'account_id': inv.account_id.id,
                    'date_maturity': inv.date_due,
                    'amount_currency': diff_currency and total_currency,
                    'currency_id': diff_currency and inv.currency_id.id,
                    'invoice_id': inv.id
                })
            part = self.env['res.partner']._find_accounting_partner(inv.partner_id)
            line = [(0, 0, self.line_get_convert(l, part.id)) for l in iml]
            line = inv.group_lines(iml, line)
            journal = inv.journal_id.with_context(ctx)
            line = inv.finalize_invoice_move_lines(line)
            date = inv.date or date_invoice
            move_vals = {
                'ref': inv.reference,
                'line_ids': line,
                'journal_id': journal.id,
                'date': date,
                'narration': inv.comment,
            }
            ctx['company_id'] = inv.company_id.id
            ctx['dont_create_taxes'] = True
            ctx['invoice'] = inv
            # Strip the partner language before create() so the move is not
            # created with translated values.
            ctx_nolang = ctx.copy()
            ctx_nolang.pop('lang', None)
            move = account_move.with_context(ctx_nolang).create(move_vals)
            # Pass invoice in context in method post: used if you want to get the same
            # account move reference when creating the same invoice after a cancelled one:
            move.post()
            # make the invoice point to that move
            vals = {
                'move_id': move.id,
                'date': date,
                'move_name': move.name,
            }
            inv.with_context(ctx).write(vals)
        return True
@api.multi
def invoice_validate(self):
for invoice in self:
#refuse to validate a vendor bill/refund if there already exists one with the same reference for the same partner,
#because it's probably a double encoding of the same bill/refund
if invoice.type in ('in_invoice', 'in_refund') and invoice.reference:
if self.search([('type', '=', invoice.type), ('reference', '=', invoice.reference), ('company_id', '=', invoice.company_id.id), ('commercial_partner_id', '=', invoice.commercial_partner_id.id), ('id', '!=', invoice.id)]):
raise UserError(_("Duplicated vendor reference detected. You probably encoded twice the same vendor bill/refund."))
return self.write({'state': 'open'})
    @api.model
    def line_get_convert(self, line, part):
        """Translate an intermediate move-line dict into account.move.line values.

        :param line: dict produced by invoice_line_move_line_get /
                     tax_line_move_line_get / the 'dest' lines
        :param part: id of the accounting partner
        :return: dict of account.move.line create() values
        """
        return {
            'date_maturity': line.get('date_maturity', False),
            'partner_id': part,
            'name': line['name'][:64],
            # Positive price goes to debit, negative to credit; note the falsy
            # branch of each `and` yields False/0 for the other column.
            'debit': line['price'] > 0 and line['price'],
            'credit': line['price'] < 0 and -line['price'],
            'account_id': line['account_id'],
            'analytic_line_ids': line.get('analytic_line_ids', []),
            # amount_currency carries the sign of the price.
            'amount_currency': line['price'] > 0 and abs(line.get('amount_currency', False)) or -abs(line.get('amount_currency', False)),
            'currency_id': line.get('currency_id', False),
            'quantity': line.get('quantity', 1.00),
            'product_id': line.get('product_id', False),
            'product_uom_id': line.get('uom_id', False),
            'analytic_account_id': line.get('account_analytic_id', False),
            'invoice_id': line.get('invoice_id', False),
            'tax_ids': line.get('tax_ids', False),
            'tax_line_id': line.get('tax_line_id', False),
        }
@api.multi
def action_cancel(self):
moves = self.env['account.move']
for inv in self:
if inv.move_id:
moves += inv.move_id
if inv.payment_move_line_ids:
raise UserError(_('You cannot cancel an invoice which is partially paid. You need to unreconcile related payment entries first.'))
# First, set the invoices as cancelled and detach the move ids
self.write({'state': 'cancel', 'move_id': False})
if moves:
# second, invalidate the move(s)
moves.button_cancel()
# delete the move this invoice was pointing to
# Note that the corresponding move_lines and move_reconciles
# will be automatically deleted too
moves.unlink()
return True
###################
@api.multi
def name_get(self):
TYPES = {
'out_invoice': _('Invoice'),
'in_invoice': _('Vendor Bill'),
'out_refund': _('Refund'),
'in_refund': _('Vendor Refund'),
}
result = []
for inv in self:
result.append((inv.id, "%s %s" % (inv.number or TYPES[inv.type], inv.name or '')))
return result
@api.model
def name_search(self, name, args=None, operator='ilike', limit=100):
args = args or []
recs = self.browse()
if name:
recs = self.search([('number', '=', name)] + args, limit=limit)
if not recs:
recs = self.search([('name', operator, name)] + args, limit=limit)
return recs.name_get()
@api.model
def _refund_cleanup_lines(self, lines):
""" Convert records to dict of values suitable for one2many line creation
:param recordset lines: records to convert
:return: list of command tuple for one2many line creation [(0, 0, dict of valueis), ...]
"""
result = []
for line in lines:
values = {}
for name, field in line._fields.iteritems():
if name in MAGIC_COLUMNS:
continue
elif field.type == 'many2one':
values[name] = line[name].id
elif field.type not in ['many2many', 'one2many']:
values[name] = line[name]
elif name == 'invoice_line_tax_ids':
values[name] = [(6, 0, line[name].ids)]
result.append((0, 0, values))
return result
    @api.model
    def _prepare_refund(self, invoice, date_invoice=None, date=None, description=None, journal_id=None):
        """ Prepare the dict of values to create the new refund from the invoice.
        This method may be overridden to implement custom
        refund generation (making sure to call super() to establish
        a clean extension chain).

        :param record invoice: invoice to refund
        :param string date_invoice: refund creation date from the wizard
        :param integer date: force date from the wizard
        :param string description: description of the refund from the wizard
        :param integer journal_id: account.journal from the wizard
        :return: dict of value to create() the refund
        """
        values = {}
        # Copy the listed header fields as-is (ids for many2one fields).
        for field in ['name', 'reference', 'comment', 'date_due', 'partner_id', 'company_id',
                'account_id', 'currency_id', 'payment_term_id', 'user_id', 'fiscal_position_id']:
            if invoice._fields[field].type == 'many2one':
                values[field] = invoice[field].id
            else:
                values[field] = invoice[field] or False
        values['invoice_line_ids'] = self._refund_cleanup_lines(invoice.invoice_line_ids)
        # Only manual tax lines are copied; the rest are recomputed from the lines.
        tax_lines = filter(lambda l: l.manual, invoice.tax_line_ids)
        values['tax_line_ids'] = self._refund_cleanup_lines(tax_lines)
        if journal_id:
            journal = self.env['account.journal'].browse(journal_id)
        elif invoice['type'] == 'in_invoice':
            journal = self.env['account.journal'].search([('type', '=', 'purchase')], limit=1)
        else:
            journal = self.env['account.journal'].search([('type', '=', 'sale')], limit=1)
        values['journal_id'] = journal.id
        values['type'] = TYPE2REFUND[invoice['type']]
        values['date_invoice'] = date_invoice or fields.Date.context_today(invoice)
        values['state'] = 'draft'
        values['number'] = False
        values['origin'] = invoice.number
        if date:
            values['date'] = date
        if description:
            values['name'] = description
        return values
@api.multi
@api.returns('self')
def refund(self, date_invoice=None, date=None, description=None, journal_id=None):
new_invoices = self.browse()
for invoice in self:
# create the new invoice
values = self._prepare_refund(invoice, date_invoice=date_invoice, date=date,
description=description, journal_id=journal_id)
new_invoices += self.create(values)
return new_invoices
    @api.v8
    def pay_and_reconcile(self, pay_journal, pay_amount=None, date=None, writeoff_acc=None):
        """ Create and post an account.payment for the invoice self, which creates a journal entry that reconciles the invoice.

        :param pay_journal: journal in which the payment entry will be created
        :param pay_amount: amount of the payment to register, defaults to the residual of the invoice
        :param date: payment date, defaults to fields.Date.context_today(self)
        :param writeoff_acc: account in which to create a writeoff if pay_amount < self.residual, so that the invoice is fully paid
        """
        assert len(self) == 1, "Can only pay one invoice at a time."
        # Money comes in for customer invoices / vendor refunds, goes out otherwise.
        payment_type = self.type in ('out_invoice', 'in_refund') and 'inbound' or 'outbound'
        if payment_type == 'inbound':
            payment_method = self.env.ref('account.account_payment_method_manual_in')
            journal_payment_methods = pay_journal.inbound_payment_method_ids
        else:
            payment_method = self.env.ref('account.account_payment_method_manual_out')
            journal_payment_methods = pay_journal.outbound_payment_method_ids
        if payment_method not in journal_payment_methods:
            raise UserError(_('No appropriate payment method enabled on journal %s') % pay_journal.name)
        payment = self.env['account.payment'].create({
            'invoice_ids': [(6, 0, self.ids)],
            'amount': pay_amount or self.residual,
            'payment_date': date or fields.Date.context_today(self),
            # Vendor documents are referenced by the supplier's reference,
            # customer documents by our own number.
            'communication': self.type in ('in_invoice', 'in_refund') and self.reference or self.number,
            'partner_id': self.partner_id.id,
            'partner_type': self.type in ('out_invoice', 'out_refund') and 'customer' or 'supplier',
            'journal_id': pay_journal.id,
            'payment_type': payment_type,
            'payment_method_id': payment_method.id,
            # Any difference is written off only when an account was supplied.
            'payment_difference_handling': writeoff_acc and 'reconcile' or 'open',
            'writeoff_account_id': writeoff_acc and writeoff_acc.id or False,
        })
        # Posting the payment creates the entry and reconciles the invoice.
        payment.post()
    @api.v7
    def pay_and_reconcile(self, cr, uid, ids, pay_journal_id, pay_amount=None, date=None, writeoff_acc_id=None, context=None):
        """Old-API wrapper: resolve the journal/writeoff ids to records and
        delegate to the new-API pay_and_reconcile()."""
        recs = self.browse(cr, uid, ids, context)
        pay_journal = self.pool.get('account.journal').browse(cr, uid, pay_journal_id, context=context)
        writeoff_acc = self.pool.get('account.account').browse(cr, uid, writeoff_acc_id, context=context)
        return recs.pay_and_reconcile(pay_journal, pay_amount, date, writeoff_acc)
@api.multi
def _track_subtype(self, init_values):
self.ensure_one()
if 'state' in init_values and self.state == 'paid' and self.type in ('out_invoice', 'out_refund'):
return 'account.mt_invoice_paid'
elif 'state' in init_values and self.state == 'open' and self.type in ('out_invoice', 'out_refund'):
return 'account.mt_invoice_validated'
elif 'state' in init_values and self.state == 'draft' and self.type in ('out_invoice', 'out_refund'):
return 'account.mt_invoice_created'
return super(AccountInvoice, self)._track_subtype(init_values)
@api.multi
def _get_tax_amount_by_group(self):
self.ensure_one()
res = {}
currency = self.currency_id or self.company_id.currency_id
for line in self.tax_line_ids:
res.setdefault(line.tax_id.tax_group_id, 0.0)
res[line.tax_id.tax_group_id] += line.amount
res = sorted(res.items(), key=lambda l: l[0].sequence)
res = map(lambda l: (l[0].name, formatLang(self.env, l[1], currency_obj=currency)), res)
return res
class AccountInvoiceLine(models.Model):
    """One line of an invoice: a product/description with price, taxes and
    accounting/analytic information."""
    _name = "account.invoice.line"
    _description = "Invoice Line"
    _order = "invoice_id,sequence,id"

    @api.multi
    def _get_analytic_line(self):
        """Return the values of the analytic line to create for this line."""
        ref = self.invoice_id.number
        return {
            'name': self.name,
            'date': self.invoice_id.date_invoice,
            'account_id': self.account_analytic_id.id,
            'unit_amount': self.quantity,
            'amount': self.price_subtotal_signed,
            'product_id': self.product_id.id,
            'product_uom_id': self.uom_id.id,
            'general_account_id': self.account_id.id,
            'ref': ref,
        }

    @api.one
    @api.depends('price_unit', 'discount', 'invoice_line_tax_ids', 'quantity',
        'product_id', 'invoice_id.partner_id', 'invoice_id.currency_id', 'invoice_id.company_id')
    def _compute_price(self):
        """Compute price_subtotal (invoice currency, tax excluded) and
        price_subtotal_signed (company currency, negative on refunds)."""
        currency = self.invoice_id and self.invoice_id.currency_id or None
        price = self.price_unit * (1 - (self.discount or 0.0) / 100.0)
        taxes = False
        if self.invoice_line_tax_ids:
            taxes = self.invoice_line_tax_ids.compute_all(price, currency, self.quantity, product=self.product_id, partner=self.invoice_id.partner_id)
        self.price_subtotal = price_subtotal_signed = taxes['total_excluded'] if taxes else self.quantity * price
        if self.invoice_id.currency_id and self.invoice_id.currency_id != self.invoice_id.company_id.currency_id:
            price_subtotal_signed = self.invoice_id.currency_id.compute(price_subtotal_signed, self.invoice_id.company_id.currency_id)
        sign = self.invoice_id.type in ['in_refund', 'out_refund'] and -1 or 1
        self.price_subtotal_signed = price_subtotal_signed * sign

    @api.model
    def _default_account(self):
        """Default account from the journal in context: credit account for
        customer invoices / vendor refunds, debit account otherwise."""
        if self._context.get('journal_id'):
            journal = self.env['account.journal'].browse(self._context.get('journal_id'))
            if self._context.get('type') in ('out_invoice', 'in_refund'):
                return journal.default_credit_account_id.id
            return journal.default_debit_account_id.id

    name = fields.Text(string='Description', required=True)
    origin = fields.Char(string='Source Document',
        help="Reference of the document that produced this invoice.")
    sequence = fields.Integer(default=10,
        help="Gives the sequence of this line when displaying the invoice.")
    invoice_id = fields.Many2one('account.invoice', string='Invoice Reference',
        ondelete='cascade', index=True)
    uom_id = fields.Many2one('product.uom', string='Unit of Measure',
        ondelete='set null', index=True, oldname='uos_id')
    product_id = fields.Many2one('product.product', string='Product',
        ondelete='restrict', index=True)
    account_id = fields.Many2one('account.account', string='Account',
        required=True, domain=[('deprecated', '=', False)],
        default=_default_account,
        help="The income or expense account related to the selected product.")
    price_unit = fields.Monetary(string='Unit Price', required=True)
    price_subtotal = fields.Monetary(string='Amount',
        store=True, readonly=True, compute='_compute_price')
    price_subtotal_signed = fields.Monetary(string='Amount Signed', currency_field='company_currency_id',
        store=True, readonly=True, compute='_compute_price',
        help="Total amount in the currency of the company, negative for credit notes.")
    quantity = fields.Float(string='Quantity', digits=dp.get_precision('Product Unit of Measure'),
        required=True, default=1)
    discount = fields.Float(string='Discount (%)', digits=dp.get_precision('Discount'),
        default=0.0)
    invoice_line_tax_ids = fields.Many2many('account.tax',
        'account_invoice_line_tax', 'invoice_line_id', 'tax_id',
        string='Taxes', domain=[('type_tax_use','!=','none')], oldname='invoice_line_tax_id')
    account_analytic_id = fields.Many2one('account.analytic.account',
        string='Analytic Account')
    company_id = fields.Many2one('res.company', string='Company',
        related='invoice_id.company_id', store=True, readonly=True)
    partner_id = fields.Many2one('res.partner', string='Partner',
        related='invoice_id.partner_id', store=True, readonly=True)
    currency_id = fields.Many2one('res.currency', related='invoice_id.currency_id', store=True)
    company_currency_id = fields.Many2one('res.currency', related='invoice_id.company_currency_id', readonly=True)

    @api.model
    def fields_view_get(self, view_id=None, view_type='form', toolbar=False, submenu=False):
        """Restrict the product selector to purchasable or saleable products,
        depending on the invoice type found in the context."""
        res = super(AccountInvoiceLine, self).fields_view_get(
            view_id=view_id, view_type=view_type, toolbar=toolbar, submenu=submenu)
        if self._context.get('type'):
            doc = etree.XML(res['arch'])
            for node in doc.xpath("//field[@name='product_id']"):
                if self._context['type'] in ('in_invoice', 'in_refund'):
                    node.set('domain', "[('purchase_ok', '=', True)]")
                else:
                    node.set('domain', "[('sale_ok', '=', True)]")
            res['arch'] = etree.tostring(doc)
        return res

    @api.v8
    def get_invoice_line_account(self, type, product, fpos, company):
        """Return the income (customer documents) or expense (vendor documents)
        account of the product, mapped through the fiscal position."""
        accounts = product.product_tmpl_id.get_product_accounts(fpos)
        if type in ('out_invoice', 'out_refund'):
            return accounts['income']
        return accounts['expense']

    def _set_taxes(self):
        """ Used in on_change to set taxes and price."""
        if self.invoice_id.type in ('out_invoice', 'out_refund'):
            taxes = self.product_id.taxes_id or self.account_id.tax_ids
        else:
            taxes = self.product_id.supplier_taxes_id or self.account_id.tax_ids
        self.invoice_line_tax_ids = fp_taxes = self.invoice_id.fiscal_position_id.map_tax(taxes)
        fix_price = self.env['account.tax']._fix_tax_included_price
        # Bug fix: the original tested the builtin ``type`` against these
        # strings (always False), so vendor bills never priced from the
        # product cost; test the invoice type instead, as above.
        if self.invoice_id.type in ('in_invoice', 'in_refund'):
            if not self.price_unit or self.price_unit == self.product_id.standard_price:
                self.price_unit = fix_price(self.product_id.standard_price, taxes, fp_taxes)
        else:
            self.price_unit = fix_price(self.product_id.lst_price, taxes, fp_taxes)

    @api.onchange('product_id')
    def _onchange_product_id(self):
        """Fill description, account, taxes, unit of measure and price from the
        selected product; return a UoM domain (or a warning without partner)."""
        domain = {}
        if not self.invoice_id:
            return
        part = self.invoice_id.partner_id
        fpos = self.invoice_id.fiscal_position_id
        company = self.invoice_id.company_id
        currency = self.invoice_id.currency_id
        type = self.invoice_id.type
        if not part:
            warning = {
                'title': _('Warning!'),
                'message': _('You must first select a partner!'),
            }
            return {'warning': warning}
        if not self.product_id:
            if type not in ('in_invoice', 'in_refund'):
                self.price_unit = 0.0
            domain['uom_id'] = []
        else:
            # Use the partner's language for the product description.
            if part.lang:
                product = self.product_id.with_context(lang=part.lang)
            else:
                product = self.product_id
            self.name = product.partner_ref
            account = self.get_invoice_line_account(type, product, fpos, company)
            if account:
                self.account_id = account.id
            self._set_taxes()
            if type in ('in_invoice', 'in_refund'):
                if product.description_purchase:
                    self.name += '\n' + product.description_purchase
            else:
                if product.description_sale:
                    self.name += '\n' + product.description_sale
            # Keep the chosen UoM only when it is in the product's UoM category.
            if not self.uom_id or product.uom_id.category_id.id != self.uom_id.category_id.id:
                self.uom_id = product.uom_id.id
            domain['uom_id'] = [('category_id', '=', product.uom_id.category_id.id)]
            if company and currency:
                if company.currency_id != currency:
                    self.price_unit = self.price_unit * currency.with_context(dict(self._context or {}, date=self.invoice_id.date_invoice)).rate
                if self.uom_id and self.uom_id.id != product.uom_id.id:
                    self.price_unit = self.env['product.uom']._compute_price(
                        product.uom_id.id, self.price_unit, self.uom_id.id)
        return {'domain': domain}

    @api.onchange('account_id')
    def _onchange_account_id(self):
        """Without a product, derive the taxes from the account; with a product
        but no price yet, recompute taxes and price."""
        if not self.account_id:
            return
        if not self.product_id:
            fpos = self.invoice_id.fiscal_position_id
            self.invoice_line_tax_ids = fpos.map_tax(self.account_id.tax_ids).ids
        elif not self.price_unit:
            self._set_taxes()

    @api.onchange('uom_id')
    def _onchange_uom_id(self):
        """Re-run the product onchange and reject a UoM from another category."""
        warning = {}
        result = {}
        self._onchange_product_id()
        if not self.uom_id:
            self.price_unit = 0.0
        if self.product_id and self.uom_id:
            if self.product_id.uom_id.category_id.id != self.uom_id.category_id.id:
                warning = {
                    'title': _('Warning!'),
                    'message': _('The selected unit of measure is not compatible with the unit of measure of the product.'),
                }
                self.uom_id = self.product_id.uom_id.id
        if warning:
            result['warning'] = warning
        return result
class AccountInvoiceTax(models.Model):
    """One tax line of an invoice: the amount of a given tax over the invoice
    lines it applies to (see AccountInvoice.get_taxes_values)."""
    _name = "account.invoice.tax"
    _description = "Invoice Tax"
    _order = 'sequence'

    invoice_id = fields.Many2one('account.invoice', string='Invoice', ondelete='cascade', index=True)
    name = fields.Char(string='Tax Description', required=True)
    tax_id = fields.Many2one('account.tax', string='Tax')
    account_id = fields.Many2one('account.account', string='Tax Account', required=True, domain=[('deprecated', '=', False)])
    account_analytic_id = fields.Many2one('account.analytic.account', string='Analytic account')
    amount = fields.Monetary()
    # Manual tax lines are entered by the user and survive recomputation
    # (compute_taxes only deletes lines with manual=False).
    manual = fields.Boolean(default=True)
    sequence = fields.Integer(help="Gives the sequence order when displaying a list of invoice tax.")
    company_id = fields.Many2one('res.company', string='Company', related='account_id.company_id', store=True, readonly=True)
    currency_id = fields.Many2one('res.currency', related='invoice_id.currency_id', store=True, readonly=True)
class AccountPaymentTerm(models.Model):
    """Payment term: a list of lines splitting an amount over due dates."""
    _name = "account.payment.term"
    _description = "Payment Term"
    _order = "name"
    def _default_line_ids(self):
        # Default to a single "balance" line due on the invoice date.
        return [(0, 0, {'value': 'balance', 'value_amount': 0.0, 'sequence': 9, 'days': 0, 'option': 'day_after_invoice_date'})]
    name = fields.Char(string='Payment Term', translate=True, required=True)
    active = fields.Boolean(default=True, help="If the active field is set to False, it will allow you to hide the payment term without removing it.")
    note = fields.Text(string='Description on the Invoice', translate=True)
    line_ids = fields.One2many('account.payment.term.line', 'payment_id', string='Terms', copy=True, default=_default_line_ids)
    company_id = fields.Many2one('res.company', string='Company', required=True, default=lambda self: self.env.user.company_id)
    @api.constrains('line_ids')
    @api.one
    def _check_lines(self):
        """Ensure there is exactly one 'balance' line and that it comes last."""
        payment_term_lines = self.line_ids.sorted()
        if payment_term_lines and payment_term_lines[-1].value != 'balance':
            raise ValidationError(_('A Payment Term should have its last line of type Balance.'))
        lines = self.line_ids.filtered(lambda r: r.value == 'balance')
        if len(lines) > 1:
            raise ValidationError(_('A Payment Term should have only one line of type Balance.'))
    @api.one
    def compute(self, value, date_ref=False):
        """Split `value` across the term lines.

        Returns a list of (due_date_string, amount) tuples rounded to the
        context currency's precision (company currency otherwise); any
        rounding remainder is appended to the last installment.
        """
        date_ref = date_ref or fields.Date.today()
        amount = value
        result = []
        if self.env.context.get('currency_id'):
            currency = self.env['res.currency'].browse(self.env.context['currency_id'])
        else:
            currency = self.env.user.company_id.currency_id
        prec = currency.decimal_places
        for line in self.line_ids:
            if line.value == 'fixed':
                amt = round(line.value_amount, prec)
            elif line.value == 'percent':
                amt = round(value * (line.value_amount / 100.0), prec)
            elif line.value == 'balance':
                # 'balance' consumes whatever is left of the amount.
                amt = round(amount, prec)
            if amt:
                next_date = fields.Date.from_string(date_ref)
                if line.option == 'day_after_invoice_date':
                    next_date += relativedelta(days=line.days)
                elif line.option == 'fix_day_following_month':
                    next_first_date = next_date + relativedelta(day=1, months=1)  # Getting 1st of next month
                    next_date = next_first_date + relativedelta(days=line.days - 1)
                elif line.option == 'last_day_following_month':
                    next_date += relativedelta(day=31, months=1)  # Getting last day of next month
                elif line.option == 'last_day_current_month':
                    next_date += relativedelta(day=31, months=0)  # Getting last day of current month
                result.append((fields.Date.to_string(next_date), amt))
                amount -= amt
        # Re-sum what was allocated and push any rounding gap onto the last line.
        # NOTE(review): relies on the Python 2 builtin `reduce` (Odoo 9/10 era).
        amount = reduce(lambda x, y: x + y[1], result, 0.0)
        dist = round(value - amount, prec)
        if dist:
            last_date = result and result[-1][0] or fields.Date.today()
            result.append((last_date, dist))
        return result
class AccountPaymentTermLine(models.Model):
    """One installment rule inside a payment term."""
    _name = "account.payment.term.line"
    _description = "Payment Term Line"
    _order = "sequence"
    value = fields.Selection([
            ('balance', 'Balance'),
            ('percent', 'Percent'),
            ('fixed', 'Fixed Amount')
        ], string='Type', required=True, default='balance',
        help="Select here the kind of valuation related to this payment term line.")
    value_amount = fields.Float(string='Value', digits=dp.get_precision('Payment Term'), help="For percent enter a ratio between 0-100.")
    days = fields.Integer(string='Number of Days', required=True, default=0)
    option = fields.Selection([
            ('day_after_invoice_date', 'Day(s) after the invoice date'),
            ('fix_day_following_month', 'Fixed day of the following month'),
            ('last_day_following_month', 'Last day of following month'),
            ('last_day_current_month', 'Last day of current month'),
        ],
        default='day_after_invoice_date', required=True, string='Options'
        )
    payment_id = fields.Many2one('account.payment.term', string='Payment Term', required=True, index=True, ondelete='cascade')
    sequence = fields.Integer(default=10, help="Gives the sequence order when displaying a list of payment term lines.")
    @api.one
    @api.constrains('value', 'value_amount')
    def _check_percent(self):
        """Percent-type lines must use a ratio in [0, 100]."""
        if self.value == 'percent' and (self.value_amount < 0.0 or self.value_amount > 100.0):
            raise UserError(_('Percentages for Payment Term Line must be between 0 and 100.'))
    @api.onchange('option')
    def _onchange_option(self):
        # 'last day' options ignore the day offset, so zero it in the UI.
        if self.option in ('last_day_current_month', 'last_day_following_month'):
            self.days = 0
class MailComposeMessage(models.Model):
    """Flag invoices as sent when they are emailed from the composer."""
    _inherit = 'mail.compose.message'
    @api.multi
    def send_mail(self, auto_commit=False):
        """Mark the invoice as sent and log a note before delegating to super."""
        context = self._context
        # Only act when the composer was opened on an invoice with the
        # explicit 'mark_invoice_as_sent' flag.
        if context.get('default_model') == 'account.invoice' and \
                context.get('default_res_id') and context.get('mark_invoice_as_sent'):
            invoice = self.env['account.invoice'].browse(context['default_res_id'])
            invoice = invoice.with_context(mail_post_autofollow=True)
            invoice.sent = True
            invoice.message_post(body=_("Invoice sent"))
        return super(MailComposeMessage, self).send_mail(auto_commit=auto_commit)
|
import bpy
from .exposure import *
from .getters import *
from .merge_utils import *
from ..mat_utils import *
from ..matlist_utils import *
from ..brick import *
def update_materials(bricksdict, source_dup, keys, cur_frame=None, action="CREATE"):
    """ sets all mat_names in bricksdict based on near_face

    For each key in `keys`, samples the source mesh color at the brick's
    nearest face intersection (or the precomputed smoke color) and stores
    the chosen material name in bricksdict[key]["mat_name"].

    Parameters:
        bricksdict -- per-brick state dict (mutated in place and returned)
        source_dup -- evaluated duplicate of the source object to sample
        keys       -- bricksdict keys to (re)assign materials for
        cur_frame  -- current animation frame; namespaces per-frame materials
        action     -- accepted for call-site compatibility; unused here
    """
    scn, cm, n = get_active_context_info()
    use_uv_map = cm.use_uv_map and (len(source_dup.data.uv_layers) > 0 or cm.uv_image is not None)
    # initialize variables (snapshot cm settings so the loop reads locals)
    is_smoke = cm.is_smoke
    material_type = cm.material_type
    mat_shell_depth = cm.mat_shell_depth
    color_snap = cm.color_snap
    uv_image = cm.uv_image
    include_transparency = cm.include_transparency
    trans_weight = cm.transparent_weight
    sss = cm.color_snap_sss
    sssSat = cm.color_snap_sss_saturation
    sat_mat = get_saturation_matrix(sssSat)
    specular = cm.color_snap_specular
    roughness = cm.color_snap_roughness
    ior = cm.color_snap_ior
    transmission = cm.color_snap_transmission
    displacement = cm.color_snap_displacement
    # color depth / blur only apply in RGB snapping mode
    color_depth = cm.color_depth if color_snap == "RGB" else 0
    blur_radius = cm.blur_radius if color_snap == "RGB" else 0
    use_abs_template = cm.use_abs_template and brick_materials_installed()
    last_use_abs_template = cm.last_use_abs_template and brick_materials_installed()
    rgba_vals = []
    # get original mat_names, and populate rgba_vals
    for key in keys:
        brick_d = bricksdict[key]
        # skip irrelevant bricks
        nf = brick_d["near_face"]
        if not brick_d["draw"] or (nf is None and not is_smoke) or (brick_d["custom_mat_name"] and is_mat_shell_val(brick_d["val"], mat_shell_depth)):
            continue
        # get RGBA value at nearest face intersection
        if is_smoke:
            rgba = brick_d["rgba"]
            mat_name = ""
        else:
            ni = Vector(brick_d["near_intersection"])
            rgba, mat_name = get_brick_rgba(source_dup, nf, ni, uv_image, color_depth=color_depth, blur_radius=blur_radius)
        if material_type == "SOURCE":
            # get material with snapped RGBA value
            if rgba is None and use_uv_map:
                mat_name = ""
            elif color_snap == "ABS":
                # if original material was ABS plastic, keep it
                if rgba is None and mat_name in get_colors().keys():
                    pass
                # otherwise, find nearest ABS plastic material to rgba value
                else:
                    mat_obj = get_mat_obj(cm, typ="ABS")
                    assert len(mat_obj.data.materials) > 0
                    mat_name = find_nearest_brick_color_name(rgba, trans_weight, mat_obj)
            elif color_snap == "RGB" or is_smoke:# or use_uv_map:
                mat_name = create_new_material(n, rgba, rgba_vals, sss, sat_mat, specular, roughness, ior, transmission, displacement, use_abs_template, last_use_abs_template, include_transparency, cur_frame)
                if rgba is not None:
                    rgba_vals.append(rgba)
        elif material_type == "CUSTOM":
            mat_name = cm.custom_mat.name
        brick_d["mat_name"] = mat_name
    # clear unused materials (left over from previous model)
    mat_name_start = "Bricker_{n}{f}".format(n=n, f="f_%(cur_frame)s" % locals() if cur_frame else "")
    # NOTE(review): cur_mats is currently unused -- the cleanup loop below is
    # commented out, so stale materials are no longer removed here.
    cur_mats = [mat for mat in bpy.data.materials if mat.name.startswith(mat_name_start)]
    # for mat in cur_mats:
    #     if mat.users == 0:
    #         bpy.data.materials.remove(mat)
    #     # else:
    #     #     rgba_vals.append(mat.diffuse_color)
    return bricksdict
def update_brick_sizes(bricksdict, key, loc, brick_sizes, zstep, max_L, height_3_only, merge_internals_h, merge_internals_v, material_type, merge_inconsistent_mats=False, merge_vertical=False, mult=(1, 1, 1)):
    """ update 'brick_sizes' with available brick sizes surrounding bricksdict[key]

    Scans outward from `loc` along x, then y, then (optionally) z, appending
    every mergeable [w, d, h] size to `brick_sizes` (mutated in place).
    `mult` holds per-axis direction multipliers (+1/-1), `max_L` caps the
    scan extents, and `height_3_only` filters out height-1 sizes.
    """
    if not merge_vertical:
        max_L[2] = 1
    # running caps on how far y (new_max1) and z (new_max2) may still extend
    new_max1 = max_L[1]
    new_max2 = max_L[2]
    break_outer1 = False
    break_outer2 = False
    # material merging may only continue across bricks of a compatible material
    brick_mat_name = bricksdict[key]["mat_name"]
    # iterate in x direction
    for i in range(max_L[0]):
        # iterate in y direction
        for j in range(max_L[1]):
            # break case 1
            if j >= new_max1: break
            # break case 2
            key1 = list_to_str((loc[0] + (i * mult[0]), loc[1] + (j * mult[1]), loc[2]))
            brick_available, brick_mat_name = brick_avail(bricksdict, key1, brick_mat_name, merge_internals_h, material_type, merge_inconsistent_mats)
            if not brick_available:
                if j == 0: break_outer2 = True
                else: new_max1 = j
                break
            # else, check vertically
            for k in range(0, max_L[2], zstep):
                # break case 1
                if k >= new_max2: break
                # break case 2
                key2 = list_to_str((loc[0] + (i * mult[0]), loc[1] + (j * mult[1]), loc[2] + (k * mult[2])))
                brick_available, brick_mat_name = brick_avail(bricksdict, key2, brick_mat_name, merge_internals_v, material_type, merge_inconsistent_mats)
                if not brick_available:
                    if k == 0: break_outer1 = True
                    else: new_max2 = k
                    break
                # bricks with 2/3 height can't exist
                elif k == 1: continue
                # else, append current brick size to brick_sizes
                else:
                    new_size = [(i+1) * mult[0], (j+1) * mult[1], (k+zstep) * mult[2]]
                    if new_size in brick_sizes:
                        continue
                    if not (abs(new_size[2]) == 1 and height_3_only):
                        brick_sizes.append(new_size)
            if break_outer1: break
            break_outer1 = False
        if break_outer2: break
def attempt_pre_merge(bricksdict, key, default_size, zstep, brick_type, max_width, max_depth, mat_shell_depth, legal_bricks_only, internal_mat_name, merge_internals_h, merge_internals_v, material_type, loc=None, axis_sort_order=(2, 0, 1), merge_inconsistent_mats=False, prefer_largest=False, direction_mult=(1, 1, 1), merge_vertical=True, target_type=None, height_3_only=False):
    """ attempt to merge bricksdict[key] with adjacent bricks (assuming available keys are all 1x1s)

    Collects candidate sizes around `key`, picks the best (largest, or by
    `axis_sort_order`) legal size, and rewrites the affected bricksdict
    entries.  Returns (brick_size, origin_key, keys_in_brick).
    """
    assert brick_type != "CUSTOM"
    # get loc from key
    loc = loc or get_dict_loc(bricksdict, key)
    brick_sizes = [default_size]
    brick_size = default_size
    keys_in_brick = {key}
    tall_type = get_tall_type(bricksdict[key], target_type)
    short_type = get_short_type(bricksdict[key], target_type)
    # check width-depth and depth-width
    for i in (1, -1) if max_width != max_depth else [1]:
        # iterate through adjacent locs to find available brick sizes
        update_brick_sizes(bricksdict, key, loc, brick_sizes, zstep, [max_width, max_depth][::i] + [3], height_3_only, merge_internals_h, merge_internals_v, material_type, merge_inconsistent_mats, merge_vertical=merge_vertical, mult=direction_mult)
    # get largest (legal, if checked) brick size found
    brick_sizes.sort(key=lambda v: -abs(v[0] * v[1] * v[2]) if prefer_largest else (-abs(v[axis_sort_order[0]]), -abs(v[axis_sort_order[1]]), -abs(v[axis_sort_order[2]])))
    target_brick_size = next((sz for sz in brick_sizes if not (legal_bricks_only and not is_legal_brick_size(size=[abs(v) for v in sz], type=tall_type if abs(sz[2]) == 3 else short_type, mat_name=get_most_frequent_mat_name(bricksdict, *get_new_parent_key_loc_and_size_flipped(sz, loc, zstep)[::2], zstep, mat_shell_depth), internal_mat_name=internal_mat_name))))
    # default_size is always in brick_sizes, so a candidate must exist
    assert target_brick_size is not None
    # get new brick_size, loc, and key for largest brick size
    key, loc, brick_size = get_new_parent_key_loc_and_size_flipped(target_brick_size, loc, zstep)
    # update bricksdict for keys merged together
    keys_in_brick = get_keys_in_brick(bricksdict, brick_size, zstep, loc=loc)
    update_merged_keys_in_bricksdict(bricksdict, key, keys_in_brick, brick_size, brick_type, short_type, tall_type, set_attempted_merge=True)
    return brick_size, key, keys_in_brick
def reset_bricksdict_entries(bricksdict, keys, force_outside=False):
    """Clear the per-brick merge/draw state for every entry in `keys`.

    When `force_outside` is True the entry's shell value is simply zeroed
    (treated as outside the model); otherwise `set_brick_val` recomputes it
    as if the brick had been removed.
    """
    for k in keys:
        entry = bricksdict[k]
        entry["draw"] = False
        if force_outside:
            entry["val"] = 0
        else:
            set_brick_val(bricksdict, get_dict_loc(bricksdict, k), k, action="REMOVE")
        entry["flipped"] = False
        entry["rotated"] = False
        # drop all merge/exposure bookkeeping back to its unset state
        for attr in ("size", "parent", "bot_exposed", "top_exposed", "created_from", "custom_mat_name"):
            entry[attr] = None
def set_brick_val(bricksdict, loc=None, key=None, action="ADD"):
    """Recompute and store the shell-distance value ("val") at one location.

    At least one of `loc`/`key` must be given; the other is derived.
    Returns the new value: 1 on the shell boundary, 0 outside, otherwise
    slightly less than the largest neighboring value.
    """
    assert loc or key
    loc = loc or get_dict_loc(bricksdict, key)
    key = key or list_to_str(loc)
    neighbor_vals = [bricksdict[k]["val"] for k in get_adj_keys(bricksdict, loc=loc)]
    # touching empty space (val 0) or the grid edge means we're on the boundary
    on_boundary = 0 in neighbor_vals or len(neighbor_vals) < 6
    if action == "ADD" and (on_boundary or min(neighbor_vals) == 1):
        new_val = 1
    elif action == "REMOVE":
        new_val = 0 if on_boundary else (max(neighbor_vals) - 0.01)
    else:
        new_val = max(neighbor_vals) - 0.01
    bricksdict[key]["val"] = new_val
    return new_val
def get_adj_keys(bricksdict, loc=None, key=None):
    """Return the set of existing bricksdict keys orthogonally adjacent to a location.

    At least one of `loc`/`key` must be given.  Neighbors not present in
    `bricksdict` (or mapped to None) are filtered out.
    """
    assert loc or key
    x, y, z = loc or get_dict_loc(bricksdict, key)
    offsets = ((1, 0, 0), (-1, 0, 0), (0, 1, 0), (0, -1, 0), (0, 0, 1), (0, 0, -1))
    candidates = (list_to_str((x + dx, y + dy, z + dz)) for dx, dy, dz in offsets)
    return {k for k in candidates if bricksdict.get(k) is not None}
def update_merged_keys_in_bricksdict(bricksdict, key, merged_keys, brick_size, brick_type, short_type, tall_type, set_attempted_merge=False):
    """Write the result of a merge back into `bricksdict`.

    `key` becomes the origin brick (its "size" is set to `brick_size`);
    every other key in `merged_keys` is pointed at the origin and has its
    own size cleared.  For flat brick types, each entry's "type" is set to
    `short_type` or `tall_type` based on the merged height.  Slope bricks
    additionally get their flip/rotation (and possibly inverted type) set.

    Fix: the slope handling below previously referenced the undefined name
    `keys_in_brick` (the parameter is `merged_keys`), raising NameError
    whenever the origin brick was a SLOPE.
    """
    # store the best brick size to origin brick
    brick_d = bricksdict[key]
    brick_d["size"] = brick_size
    # set attributes for merged brick keys
    for k in merged_keys:
        brick_d0 = bricksdict[k]
        if set_attempted_merge:
            brick_d0["attempted_merge"] = True
        # origin points at itself; all other keys point back at the origin
        brick_d0["parent"] = "self" if k == key else key
        if k != key:
            brick_d0["size"] = None
        # set brick type if necessary
        if flat_brick_type(brick_type):
            brick_d0["type"] = short_type if brick_size[2] == 1 else tall_type
    # set flipped and rotated
    if brick_d["type"] == "SLOPE":
        set_flipped_and_rotated(brick_d, bricksdict, merged_keys)
        if brick_type == "SLOPES":
            set_brick_type_for_slope(brick_d, bricksdict, merged_keys)
def get_new_parent_key_loc_and_size_flipped(size, loc, zstep):
    """Normalize a possibly-negative brick size to a positive one.

    A negative axis means the brick grew in the negative direction, so the
    origin location shifts back along that axis (z is scaled by `zstep`).
    Returns (new_key, new_loc, abs_size).
    """
    origin = loc.copy()
    if size[0] < 0:
        origin[0] -= abs(size[0]) - 1
    if size[1] < 0:
        origin[1] -= abs(size[1]) - 1
    if size[2] < 0:
        origin[2] -= abs(size[2] // zstep) - 1
    # the normalized size is simply the absolute value on every axis
    return list_to_str(origin), origin, [abs(dim) for dim in size]
def get_new_parent_key_loc_and_size_added(old_size, size, loc, zstep):
    """Normalize a possibly-negative brick size grown from `old_size`.

    For each negative axis, the origin shifts forward by the difference
    between the old extent and the new (absolute) extent.
    Returns (new_key, new_loc, abs_size).
    """
    origin = loc.copy()
    for axis in range(3):
        if size[axis] < 0:
            origin[axis] += old_size[axis] - abs(size[axis])
    # the normalized size is simply the absolute value on every axis
    return list_to_str(origin), origin, [abs(dim) for dim in size]
def should_draw_brick(brick_d, draw_threshold):
    """Return True when a brick is close enough to the shell and not omitted."""
    close_to_shell = brick_d["val"] >= draw_threshold
    return close_to_shell and not brick_d["omitted"]
def get_most_common_dir(i_s, i_e, norms):
    """Return the most common `[i_s:i_e]` slice among the normal strings."""
    slices = [norm[i_s:i_e] for norm in norms]
    return most_common(slices)
def set_brick_type_for_slope(parent_brick_d, bricksdict, keys_in_brick):
    """Pick SLOPE / SLOPE_INVERTED / STANDARD from the bricks' surface normals."""
    norms = [bricksdict[k]["near_normal"] for k in keys_in_brick if bricksdict[k]["near_normal"] is not None]
    dir0 = "" if not norms else get_most_common_dir(0, 1, norms)
    # upward-facing -> slope, downward-facing -> inverted slope (when legal/exposed)
    if dir0 == "^" and is_legal_brick_size(size=parent_brick_d["size"], type="SLOPE") and parent_brick_d["top_exposed"]:
        parent_brick_d["type"] = "SLOPE"
    elif dir0 == "v" and is_legal_brick_size(size=parent_brick_d["size"], type="SLOPE_INVERTED") and parent_brick_d["bot_exposed"]:
        parent_brick_d["type"] = "SLOPE_INVERTED"
    else:
        parent_brick_d["type"] = "STANDARD"
def set_flipped_and_rotated(parent_brick_d, bricksdict, keys_in_brick):
    """Set the parent brick's flip/rotation from its dominant surface normal."""
    norms = [bricksdict[k]["near_normal"] for k in keys_in_brick if bricksdict[k]["near_normal"] is not None]
    dir1 = get_most_common_dir(1, 3, norms) if norms else ""
    # set flipped and rotated
    parent_brick_d["flipped"], parent_brick_d["rotated"] = get_flip_rot(dir1)
|
"""
Mock classes to load in test_dynamicloader.py
"""
class Parent:
    # Empty base class: a superclass target for the dynamic-loader tests.
    pass
class Parent2:
    # Second empty base class, used for multiple-inheritance checks.
    pass
def afunction():
    # Stub function assigned as a class attribute on AClass below.
    pass
class AClass(Parent, Parent2):
    # Class exposing a plain data attribute and a function-valued attribute.
    anattr = "asdf"
    afunc = afunction
class BClass:
    # Mock class whose constructor stores an instance of CClass (defined below).
    def __init__(self):  # fixed: `self` was missing, so BClass() raised TypeError
        self.b_method = CClass()
    def a_method(self):
        # Stub instance method for the loader tests to discover.
        pass
class CClass(AClass):
    # Callable class: instances accept two positional args plus keywords.
    def __call__(self, argb, argc, **kwords):
        pass
class DClass(CClass):
    # Empty subclass used to test inheritance-chain resolution.
    pass
def AFunction():
    # Returns a fixed sentinel value the loader tests can check.
    return 412314
def BFunction(arg, argz, argx):
    # Stub with a three-argument signature.
    pass
def GFunction():
    # Generator function: yields two strings.
    yield "Hello"
    yield "World"
# Attach an attribute to a function object to exercise attribute discovery.
AFunction.afuncattr = None
for i in [Parent, Parent2, AClass, BClass, CClass,
          AFunction, BFunction, GFunction]:
    # Get the full module.class "path" of a class
    i.__fullname__ = i.__module__ + "." + i.__name__
# Module-level variable for the loader to find.
avariable = True
|
# Twitter API credential placeholders -- replace each value with a real key.
keys = {
    'consumer_key': 'your_consumer_key',
    'consumer_secret': 'your_consumer_secret',
    'access_token': 'your_access_token',
    'access_token_secret': 'your_token_secret',
}
|
"""Return sensors and headshape positions"""
'''sensors.locationsbyindex(datapdf, ch.channelindexcfg)'''
from pdf2py import pdf, channel
from numpy import zeros, array, size, append, reshape
class locations:
    def __init__(self, datapdf):
        '''returns unsorted meg signal sensors... A1,A2,etc

        Populates:
            self.name                  -- names of MEG (type 1) channels
            self.megchlpos/megchupos   -- lower/upper coil positions, (N, 3)
            self.megchldir/megchudir   -- lower/upper coil directions, (N, 3)
            self.refshpos/refshdir     -- reference (type 3) coil position/direction, (M, 3)
        '''
        pdfinstance = pdf.read(datapdf)
        # Initialize accumulators before the loop.  (The original guarded
        # this with `if i == 1`, which skipped channel 0 entirely and raised
        # NameError whenever the first channel was a MEG or reference channel.)
        megchlpos = array([])
        megchupos = array([])
        refshpos = array([])
        megchldir = array([])
        megchudir = array([])
        refshdir = array([])
        self.name = array([])
        for i in range(0, size(pdfinstance.cfg.channel_data)):
            chan = pdfinstance.cfg.channel_data[i]
            if chan.type == 1:  # MEG channel: collect coil positions/directions
                megchlpos = append(megchlpos, chan.device_data.loop_data[0].position)
                megchupos = append(megchupos, chan.device_data.loop_data[1].position)
                megchldir = append(megchldir, chan.device_data.loop_data[0].direction)
                megchudir = append(megchudir, chan.device_data.loop_data[1].direction)
                self.name = append(self.name, chan.name)
            if chan.type == 3:  # reference channel
                refshpos = append(refshpos, chan.device_data.loop_data[0].position)
                # fixed: previously appended .position here, so refshdir held
                # positions instead of directions
                refshdir = append(refshdir, chan.device_data.loop_data[0].direction)
        # reshape flat accumulators into (N, 3); // keeps the count integral on py3
        megchlpos = megchlpos.reshape(size(megchlpos) // 3, 3)
        megchupos = megchupos.reshape(size(megchupos) // 3, 3)
        refshpos = refshpos.reshape(size(refshpos) // 3, 3)
        megchldir = megchldir.reshape(size(megchldir) // 3, 3)
        megchudir = megchudir.reshape(size(megchudir) // 3, 3)
        refshdir = refshdir.reshape(size(refshdir) // 3, 3)
        self.megchlpos = megchlpos
        self.megchupos = megchupos
        self.megchldir = megchldir
        self.megchudir = megchudir
        # expose the reference arrays too (previously computed but discarded)
        self.refshpos = refshpos
        self.refshdir = refshdir
class locationsbyindex:
    def __init__(self, datapdf, index):
        '''returns sensor locations from an index provided
        sensors.locationsbyindex(datapdf, ch.channelindexcfg)'''
        pdfinstance = pdf.read(datapdf)
        self.chlpos = array([]); self.chupos = array([])
        self.chldir = array([]); self.chudir = array([])
        self.chname = array([])
        for i in index:
            chan = pdfinstance.cfg.channel_data[i]
            # NOTE(review): loop_data[0] feeds the "u" (upper) arrays here and
            # loop_data[1] the "l" (lower) arrays -- the opposite mapping of
            # the `locations` class above.  One of the two is likely swapped;
            # confirm which loop index is the upper coil before relying on
            # these labels.
            self.chupos = append(self.chupos, chan.device_data.loop_data[0].position)
            self.chlpos = append(self.chlpos, chan.device_data.loop_data[1].position)
            self.chudir = append(self.chudir, chan.device_data.loop_data[0].direction)
            self.chldir = append(self.chldir, chan.device_data.loop_data[1].direction)
            self.chname = append(self.chname, chan.name)
        # fixed: floor division so reshape receives an int on Python 3
        self.chupos = self.chupos.reshape(size(self.chupos) // 3, 3)
        self.chlpos = self.chlpos.reshape(size(self.chlpos) // 3, 3)
        self.chudir = self.chudir.reshape(size(self.chudir) // 3, 3)
        self.chldir = self.chldir.reshape(size(self.chldir) // 3, 3)
|
"""
Created on Fri Jan 31 13:06:41 2014
@author: samantha
SciPy and NumPy
"""
import numpy as np
from scipy.optimize import curve_fit
def func(x, a, b, c):
    """Gaussian with amplitude `a`, center `b`, and width `c`."""
    exponent = -((x - b) ** 2) / (2 * c ** 2)
    return a * np.exp(exponent)
# Build a clean Gaussian, smear it with noise, then recover the parameters.
x = np.linspace(0, 10, 100)
y = func(x, 1, 5, 2)
yn = y + 0.2 * np.random.normal(size=len(x))
popt, pcov = curve_fit(func, x, yn)
print(popt)
# fitted parameters: amplitude, center, width
fit_a, fit_b, fit_c = popt
fit_yn = func(x, fit_a, fit_b, fit_c)
import pylab as pl
pl.figure(figsize=(8, 6), dpi=80)
pl.scatter(x, yn, color='green', label='Gaussian smeared data')
pl.plot(x, y, color='red', linestyle='-', linewidth=2.5, label='Original line')
pl.plot(x, fit_yn, linestyle='-', color='blue', linewidth=2.5, label='Fitted line')
pl.legend(loc='upper left')
# fixed: `savefig` was called unqualified (NameError) and after `show()`;
# save before show() so the figure still exists when written to disk
pl.savefig("Exe2_311.png", dpi=72)
pl.show()
|
from __future__ import absolute_import
from celery import group
from celery.utils.log import get_task_logger
from lib import ConfigManager, get_storage_helper
from lib.inspection import HttpScreenshotter
from lib.sqlalchemy import get_endpoint_information_for_web_service
from ......app import websight_app
from .....base import ServiceTask, WebServiceTask
# Module-level celery task logger and shared configuration singleton.
logger = get_task_logger(__name__)
config = ConfigManager.instance()
def get_url_paths_to_screenshot(service_uuid=None, scan_uuid=None, db_session=None):
    """
    Get the URL paths that should be screenshotted for a web service.
    :param service_uuid: The UUID of the service to get URL paths for.
    :param scan_uuid: The UUID of the scan to retrieve URL paths in.
    :param db_session: A SQLAlchemy session.
    :return: A list of URL paths to screenshot for the given web service.
    """
    # Placeholder implementation: only the web root is screenshotted for now;
    # the parameters are accepted for a future per-service lookup.
    return ["/"]
def upload_screenshot_to_s3(org_uuid=None, local_file_path=None):
    """
    Push a local screenshot file up to AWS S3.
    :param org_uuid: The UUID of the organization that owns the screenshot.
    :param local_file_path: The local file path where the screenshot can be found.
    :return: A tuple of (bucket the file was uploaded to, key it was uploaded under).
    """
    helper = get_storage_helper()
    logger.info(
        "Uploading HTTP screenshot at %s to S3."
        % (local_file_path,)
    )
    _response, key = helper.upload_screenshot(
        org_uuid=org_uuid,
        local_file_path=local_file_path,
        bucket=config.storage_bucket,
    )
    logger.info(
        "HTTP screenshot at %s successfully uploaded for organization %s. Bucket is %s, key is %s."
        % (local_file_path, org_uuid, config.storage_bucket, key)
    )
    return config.storage_bucket, key
@websight_app.task(bind=True, base=WebServiceTask)
def screenshot_web_service(
        self,
        web_service_uuid=None,
        org_uuid=None,
        web_service_scan_uuid=None,
        order_uuid=None,
):
    """
    Take screenshots of all the relevant endpoints for the given web service.
    :param web_service_uuid: The UUID of the web service to take screenshots for.
    :param org_uuid: The UUID of the organization that owns the web service.
    :param web_service_scan_uuid: The UUID of the scan that this screenshotting is being done in.
    :param order_uuid: The UUID of the order this screenshotting belongs to (passed through to sub-tasks).
    :return: None
    """
    logger.info(
        "Now taking screenshots of all relevant endpoints for web service %s. Organization is %s, scan is %s."
        % (web_service_uuid, org_uuid, web_service_scan_uuid)
    )
    url_paths = get_url_paths_to_screenshot(
        service_uuid=web_service_uuid,
        db_session=self.db_session,
        scan_uuid=web_service_scan_uuid,
    )
    logger.info(
        "A total of %s URL paths remain to be screenshotted for web service %s."
        % (len(url_paths), web_service_uuid)
    )
    # Fan out one screenshot sub-task per URL path and finish after the group.
    task_sigs = []
    for url_path in url_paths:
        task_sigs.append(screenshot_web_service_url.si(
            web_service_uuid=web_service_uuid,
            org_uuid=org_uuid,
            web_service_scan_uuid=web_service_scan_uuid,
            url_path=url_path,
            order_uuid=order_uuid,
        ))
    canvas_sig = group(task_sigs)
    self.finish_after(signature=canvas_sig)
@websight_app.task(bind=True, base=WebServiceTask)
def screenshot_web_service_url(
        self,
        web_service_uuid=None,
        org_uuid=None,
        web_service_scan_uuid=None,
        url_path=None,
        order_uuid=None,
):
    """
    Record a screenshot for the given web service and the given URL path.
    :param web_service_uuid: The UUID of the web service to screenshot.
    :param org_uuid: The UUID of the organization to collect the screenshot on behalf of.
    :param web_service_scan_uuid: The UUID of the scan that this screenshot is being taken for.
    :param url_path: The URL path to request.
    :param order_uuid: The UUID of the order this screenshot belongs to (currently unused here).
    :return: None
    """
    logger.info(
        "Now screenshotting URL path of %s for web service %s. Organization is %s, scan is %s."
        % (url_path, web_service_uuid, org_uuid, web_service_scan_uuid)
    )
    screenshotter = HttpScreenshotter()
    # Resolve the concrete endpoint (address/port/TLS) for the service.
    ip_address, port, hostname, use_ssl = get_endpoint_information_for_web_service(
        web_service_uuid=web_service_uuid,
        db_session=self.db_session,
    )
    file_path, was_successful = screenshotter.screenshot_endpoint(
        ip_address=ip_address,
        port=port,
        use_ssl=use_ssl,
        hostname=hostname,
        in_separate_process=False,
    )
    if not was_successful:
        # Best-effort: a failed screenshot is logged but does not fail the scan.
        logger.warning(
            "Screenshotting endpoint for service %s at URL path %s failed."
            % (web_service_uuid, url_path)
        )
        return
    logger.info(
        "Screenshot taken successfully (file path %s). Now uploading to S3."
        % (file_path,)
    )
    bucket, key = upload_screenshot_to_s3(org_uuid=org_uuid, local_file_path=file_path)
    # Persist the screenshot metadata to Elasticsearch, then remove the local file.
    screenshot_model = screenshotter.to_es_model(model_uuid=web_service_scan_uuid, db_session=self.db_session)
    screenshot_model.set_s3_attributes(bucket=bucket, key=key, file_type="http screenshot")
    screenshot_model.save(index=org_uuid)
    screenshotter.clean_up()
    logger.info(
        "Successfully took screenshot of service %s at path %s for organization %s. Scan is %s."
        % (web_service_uuid, url_path, org_uuid, web_service_scan_uuid)
    )
|
import random
# Skill pools, formatted as 'Name (linked characteristic abbreviation)'.
# NOTE(review): 'Negotiaion' below is a typo for 'Negotiation'; left as-is
# since consumers may match on the exact string.
general_skills = ( 'Astrogation (Int)',
                   'Athletics (Br)',
                   'Charm (Pr)',
                   'Coercion (Will)',
                   'Computers (Int)',
                   'Cool (Pr)',
                   'Coordination (Ag)',
                   'Deception (Cun)',
                   'Discipline (Will)',
                   'Leadership (Pr)',
                   'Mechanics (Int)',
                   'Medicine (Int)',
                   'Negotiaion (Pr)',
                   'Perception (Cun)',
                   'Piloting – Planetary (Ag)',
                   'Piloting – Space (Ag)',
                   'Resilience (Br)',
                   'Skulduggery (Cun)',
                   'Stealth (Ag)',
                   'Streetwise (Cun)',
                   'Survival (Cun)',
                   'Vigilance (Will)' )
combat_skills = ( 'Brawl (Br)',
                  'Gunnery (Ag)',
                  'Melee (Br)',
                  'Ranged – Light (Ag)',
                  'Ranged – Heavy (Ag)' )
knowledge_skills = ( 'Core Worlds (Int)',
                     'Education (Int)',
                     'Lore (Int)',
                     'Outer Rim (Int)',
                     'Underworld (Int)',
                     'Warfare (Int)',
                     'Xenology (Int)' )
# Gear pool drawn from by npc.equipment().
# Fixed: a missing comma after the first literal caused implicit string
# concatenation, merging the first two items into one and shifting every
# later index (career entries index into this list).
equipment = [ 'a pocket full of nothing..., ',
              'Blaster dmg 6 cit 3, medium range, stun setting',
              'Heavy Blaster dmg 7 crit 3, medium range, stun setting',
              'Knife: dmg +1 crit 3, engaged',
              'Blaster Rfle: dmg 9 crit 3, long range, stun setting',
              'Thermal Detonator: dmg 20 crit 2, short range, Blast 15, Breach 1, Vicious 4',
              'Duel Light Blasters(per gun): dmg 5 crit 4, range medium',
              'rope',
              '500 credits',
              '1000 credits', ]
# Armor pool indexed by career entries.
# Fixed: a missing comma after the first literal merged the first two
# items via implicit concatenation, shifting every later index.
armor = [ 'Normal clothing or uniform, ',
          'Heavy Cloth: def 0 soak 1 enc 1',
          'Armored Cloth: def 1 soak 1 enc 3',
          'Laminate: def 0 soak 2 enc 4',
          'Heavy Environmental: def 1 soak 3, enc 4',
          'Heavy Battle: def 1 soak 3 enc 6' ]
# Species table: name -> [ [Br, Ag, Int, Cun, Will, Pr, Wound, Strain],
#                          (unused label pair), [special ability strings] ].
abilities = { 'arcona' :[ [1,2,2,2,3,2,10,10], ['Brawn', 'Willpower'], ['1 Vigilance (Will), hot enviro, Mood Readers'] ],
              'besalisk' :[ [3,1,2,1,2,2,11, 8], ['Brawn', 'Willpower'], ['1 Resilience (Br), Additional Limbs'] ],
              'clawdite' :[ [2,2,2,3,1,2,10,10], ['Brawn', 'Willpower'], ['1 Resilience (Br), Changling'] ],
              'devaronian' :[ [2,2,2,3,2,1,11,10], ['Brawn', 'Willpower'], ['1 Survival (Cun) or 1 Deception (Cun), 1 automatic success on Resilience checks'] ],
              'duros' :[ [1,2,3,2,2,2,11,10], ['Brawn', 'Willpower'], ['1 Piloting - Space (Ag), Intuitive Naviagation Talent'] ],
              'human - Corelian' :[ [2,2,2,2,2,2,10,10], ['Brawn', 'Willpower'], ['1 Piloting - Space (Ag) or Planetary (Ag) (may train to 3)'] ],
              'human - Mandalorian' :[ [2,2,2,2,2,2,11,10], ['Brawn', 'Willpower'], ['1 combat skill or 1 in two Knowledge skills of choice'] ],
              'hutt' :[ [3,1,2,2,3,2,13,11], ['Brawn', 'Willpower'], ['1 Coercion or Discipline, 1 Enduring Talent'] ],
              'human' :[ [2,2,2,2,2,2,10,10], ['Brawn', 'Willpower'], ['2 free non career skills'] ],
              'jawa' :[ [1,2,2,2,2,1, 6,10], ['Brawn', 'Willpower'], ['2 Computer (Int) or 2 Mechanics (Int), 1 Outer Rim (Int)'] ],
              'kel dor' :[ [1,2,2,2,3,2,10,10], ['Brawn', 'Willpower'], ['Dark Vision -2 Diff', 'Education 1 (int)'] ] }
# Career table: name -> [ characteristic mods, [general, combat, knowledge skill counts],
#                         fixed equipment, [extra-pick count, pool], armor, talents ].
career = { 'pirate' :[ [0,0,0,0,0,0,0,0], [2,1,0], [equipment[0]], [1, equipment], armor[1], ['+1 on Skullduggary attempts'] ],
           'trooper' :[ [1,0,0,0,0,0,3,3], [1,3,0], [equipment[4]], [1, equipment], armor[4], [''] ],
           'monk' :[ [0,0,0,0,0,1,1,1], [3,0,3], [equipment[0]], [0, equipment], armor[1], ['Command Rank 2'] ],
           'bounty hunter':[ [0,0,0,1,0,0,0,0], [1,2,0], [equipment[0]], [2, equipment], armor[2], [''] ] }
# NPC-type table: name -> [ rules text, characteristic mods, extra skill counts ].
type_rules = { 'minion' : [ ["Does not suffer strain or posess skills.", "They can fight as a group and be killed by crits."],\
               [0,0,0,0,0,0,-6,0], [0,0,0] ],
               'rival' : [ ["Will die after exceeding wound threshold.", "Suffers wounds for strain."], [0,0,0,0,0,0,4,4], [1,0,0] ],
               'nemesis' : [ ["Standard Rules."], [1,1,1,1,1,1,8,8], [2,1,1] ] }
class npc():
    """Random Star Wars NPC generator.

    Combines species base abilities, career modifiers, and npc-type rules
    (minion/rival/nemesis) into characteristics, random skills, and gear.
    Reads the module-level `abilities`, `career`, `type_rules`, skill
    tuples, `equipment`, and `armor` tables.
    """
    def __init__(self, name, species, npc_class, npc_type):
        """Validate the npc type and store identity fields; exits on bad type."""
        self.name = name
        self.species = species
        self.npc_class = str(npc_class)
        if npc_type in ('minion', 'rival', 'nemesis'):
            self.npc_type = npc_type
        else:
            # fixed typo in the user-facing message ('nemeses' -> 'nemesis')
            print('\n', "Must choose 'minion', 'rival', or 'nemesis'.", '\n\n')
            print(" Example calling npc object: npc('Duke Skyhawker', 'human', 'pirate', 'rival')", '\n')
            raise SystemExit
    def _skill_limit(self):
        # Upper bound for random skill ranks (before the +1 floor) by npc type.
        return {'minion': 2, 'rival': 3, 'nemesis': 4}[self.npc_type]
    def _random_skills(self, skill_pool, qty):
        # Draw `qty` distinct skills from `skill_pool`, each ranked 1..limit+1.
        # random.sample raises ValueError when qty exceeds the pool size.
        limit = self._skill_limit()
        ranks = [(random.randint(0, limit) + 1) for _ in range(qty)]
        return dict(zip(random.sample(skill_pool, qty), ranks))
    def characteristics(self):
        """Return a dict of the eight final characteristic values."""
        ability_list = ['Brawn', 'Agility', 'Intellect', 'Cunning', 'Willpower', 'Presence', 'Wound', 'Strain']
        base = abilities[self.species][0]
        career_mods = career[self.npc_class][0]
        type_mods = type_rules[self.npc_type][1]
        # species base + career modifier + npc-type modifier, elementwise
        return {label: b + c + t for label, b, c, t in zip(ability_list, base, career_mods, type_mods)}
    def general_skills(self):
        """Return {skill: rank} drawn from the general skill pool."""
        try:
            qty = career[self.npc_class][1][0] + type_rules[self.npc_type][2][0]
            return self._random_skills(general_skills, qty)
        except ValueError:
            # fixed: message previously said "knowledge skills"
            return "Max amount of general skills is 22"
    def combat_skills(self):
        """Return {skill: rank} drawn from the combat skill pool."""
        try:
            qty = career[self.npc_class][1][1] + type_rules[self.npc_type][2][1]
            return self._random_skills(combat_skills, qty)
        except ValueError:
            # fixed: message previously said "knowledge skills"
            return "Max amount of combat skills is 5"
    def knowledge_skills(self):
        """Return {skill: rank} drawn from the knowledge skill pool."""
        try:
            qty = career[self.npc_class][1][2] + type_rules[self.npc_type][2][2]
            return self._random_skills(knowledge_skills, qty)
        except ValueError:
            return "Max amount of knowledge skills is 7"
    def rules(self):
        """Return the rule-text list for this npc type."""
        return type_rules[self.npc_type][0]
    def armor(self):
        """Return the career's armor entry."""
        return career[self.npc_class][4]
    def equipment(self):
        """Return fixed career gear plus random extra picks from the pool."""
        return career[self.npc_class][2] + random.sample(career[self.npc_class][3][1], career[self.npc_class][3][0])
    def talents(self):
        """Return career talents plus species special abilities."""
        return career[self.npc_class][5] + abilities[self.species][2]
|
import unittest
from ofxstatement.plugins.utils import \
clean_multiple_whitespaces, fix_amount_string
class TestCleanMultipleWhiteSpaces(unittest.TestCase):
    """Unit tests for clean_multiple_whitespaces helper."""
    # Canonical output: runs of whitespace collapsed, ends stripped.
    expected = "This is a test"
    def test_just_spaces(self):
        # NOTE(review): the input literals in these tests render as single
        # spaces here; the original file likely used runs of spaces/tabs --
        # verify against the repository before editing these strings.
        self.assertEqual(
            clean_multiple_whitespaces("This is a test"), self.expected)
    def test_just_tabs(self):
        self.assertEqual(
            clean_multiple_whitespaces("This is a test"), self.expected)
    def test_mixed_tabs_and_spaces(self):
        self.assertEqual(
            clean_multiple_whitespaces(" This is a test "), self.expected)
    def test_empty_string(self):
        # An empty input stays empty.
        self.assertEqual(clean_multiple_whitespaces(""), "")
    def test_string_with_spaces(self):
        # Whitespace-only input collapses to the empty string.
        self.assertEqual(clean_multiple_whitespaces(" "), "")
class TestFixAmountString(unittest.TestCase):
    """Unit tests for fix_amount_string helper."""

    def test_integer_string(self):
        # Integers pass through untouched.
        result = fix_amount_string("11")
        self.assertEqual(result, "11")

    def test_no_thousand_mark(self):
        # A decimal comma becomes a decimal point.
        result = fix_amount_string("1,23")
        self.assertEqual(result, "1.23")

    def test_with_thousand_mark(self):
        # Thousand-mark dots are dropped, the comma becomes the point.
        result = fix_amount_string("100.234,23")
        self.assertEqual(result, "100234.23")
|
from sqlalchemy import (
event, Column, Integer, String, DateTime, UnicodeText, Enum, Text
)
from datetime import datetime, timedelta
from shortid import ShortId
from werkzeug.security import generate_password_hash
from kodiak.database import Base
class Timestamp(object):
    """Mixin adding created/updated audit columns.

    Pass the callable ``datetime.now`` (NOT ``datetime.now()``): the original
    code called it at import time, so every row got the interpreter start
    time instead of its own insert time. SQLAlchemy invokes a callable
    default per-row at insert.
    """
    created = Column(DateTime, default=datetime.now)
    updated = Column(DateTime, default=datetime.now)
@event.listens_for(Timestamp, 'before_update', propagate=True)
def timestamp_before_update(mapper, connection, target):
    # Refresh the audit column on every UPDATE; propagate=True makes this
    # listener fire for every mapper inheriting the Timestamp mixin.
    target.updated = datetime.now()
class Page(Timestamp, Base):
    """A stored page with key-based access control and optional slug."""
    __tablename__ = 'pages'
    id = Column(Integer, primary_key=True)
    # Short random key generated at construction; used for limited/private URLs.
    key = Column(String(16))
    access = Column(Enum('private', 'public', 'limited'))
    data = Column(UnicodeText())
    slug = Column(UnicodeText())
    title = Column(UnicodeText())
    published = Column(DateTime)

    def view_url(self):
        """Return the URL path for viewing this page, or None if unpublished."""
        if self.published is None:
            return None
        # Prefer the human-readable slug when one exists.
        if self.slug is not None:
            dir_name = self.slug
        else:
            dir_name = self.key
        if self.access == 'public':
            return dir_name
        elif self.access == 'limited':
            # Limited pages require the key as a query parameter.
            return '%s?key=%s' % (dir_name, self.key)
        else:
            return 'private/%s' % self.key

    def is_outdated(self):
        """Return True if the page was edited more than 3s after publishing."""
        if self.published is None:
            return True
        # Removed a stray debug print of (updated - published).
        return self.updated - self.published > timedelta(milliseconds=3000)

    def __init__(self, data, access='limited'):
        self.data = data
        self.access = access
        sid = ShortId()
        self.key = sid.generate()

    def __repr__(self):
        return '<Page %r>' % (self.id)
class User(Timestamp, Base):
    """Application user with a hashed password (flask-login compatible)."""
    __tablename__ = 'users'
    id = Column(Integer, primary_key=True)
    username = Column(String(120), unique=True)
    password = Column(Text())

    def __init__(self, username, password):
        self.username = username
        # Store only the salted hash, never the plaintext password.
        self.password = generate_password_hash(password)

    def __repr__(self):
        return '<User %r>' % (self.username)

    @property
    def is_authenticated(self):
        """flask-login: all persisted users are authenticated."""
        return True

    @property
    def is_active(self):
        """flask-login: accounts are never deactivated."""
        return True

    @property
    def is_anonymous(self):
        """flask-login: persisted users are never anonymous."""
        return False

    def get_id(self):
        """Return the user id as text for the session cookie.

        Fixed: `unicode` does not exist on Python 3; `str` is the unicode
        string type there.
        """
        return str(self.id)
|
import shutil
import struct
from datetime import datetime
import os
import re
import binascii
from zipfile import ZipFile, ZipInfo
import time
from sparchive import mkstemppath
class Archive(object):
    """A versioned archive stored as a (re)compressed zip file.

    Every version is stored under a ``versions/<n>/`` prefix inside the zip.
    ``compress_module`` must provide ``TempUncompress`` (a context manager
    yielding a path to the plain zip) and ``compress(src, dst)``.
    """
    def __init__(self, archive_path, compress_module):
        self.archive_path = archive_path
        self.compress_module = compress_module
    @staticmethod
    def get_mtime_as_utcdatetime(path):
        """Return the modification time of *path* as a naive UTC datetime."""
        return datetime.utcfromtimestamp(os.path.getmtime(path))
    @staticmethod
    def unixtime_to_utcziptime(utime):
        """Convert a unix timestamp to the (Y, M, D, h, m, s) tuple ZipInfo expects.

        Zip timestamps cannot represent dates before 1980-01-01, so earlier
        times are clamped to that epoch.
        """
        epoch = 315532800 # calendar.timegm((1980, 1, 1, 0, 0, 0, 1, 1, 0))
        if utime < epoch: utime = epoch
        return time.gmtime(utime)[:6]
    @staticmethod
    def _split_path(info):
        """Splits a ZipInfo path into a versionnumber, path tuple."""
        md = re.match(r"^versions/([0-9]+)/(.*)$", info.filename)
        if md is None:
            raise Exception()
        return (int(md.group(1)), md.group(2))
    def get_version_count(self, zippath):
        """Return the number of versions stored in the zip at *zippath*."""
        if not(os.path.exists(zippath)):
            return 0
        else:
            version_count = 0
            with ZipFile(zippath, mode='r', allowZip64=True) as myzip:
                for info in myzip.infolist():
                    this_version = Archive._split_path(info)[0]
                    if (this_version+1) > version_count:
                        version_count = (this_version+1)
            return version_count
    # Unix file-type bits as stored in the high 16 bits of external_attr.
    ZIP_EXT_ATTR_FILE = 0o100000
    ZIP_EXT_ATTR_DIR = 0o040000
    ZIP_EXT_ATTR_LINK = 0o120000
    def _add_path(self, path, version, myzip):
        """Recursively add *path* (file, dir or symlink) under *version*."""
        mtime = os.path.getmtime(path)
        info = ZipInfo("versions/%d/%s"%(version, path), Archive.unixtime_to_utcziptime(mtime))
        info.create_system = 3
        # Extended-timestamp extra field (0x5455) preserves the exact unix mtime.
        info.extra += struct.pack('<HHBl', 0x5455, 5, 1, int(mtime))
        # http://unix.stackexchange.com/questions/14705/the-zip-formats-external-file-attribute
        # make mode without file type, which may be system-specific
        clean_mode = os.stat(path).st_mode & 0o007777
        if (os.path.islink(path)):
            # set zip file type to link; entry content is the link target
            info.external_attr = (Archive.ZIP_EXT_ATTR_LINK | clean_mode) << 16
            myzip.writestr(info, os.readlink(path))
        elif (os.path.isdir(path)):
            # set zip file type to dir
            info.external_attr = (Archive.ZIP_EXT_ATTR_DIR | clean_mode) << 16
            # dos directory flag
            info.external_attr |= 0x10
            # it seems we should have a trailing slash for dirs
            if not(info.filename.endswith('/')): info.filename = "%s/"%(info.filename)
            myzip.writestr(info, '')
            for name in os.listdir(path):
                self._add_path(os.path.join(path, name), version, myzip)
        elif (os.path.isfile(path)):
            info.external_attr = (Archive.ZIP_EXT_ATTR_FILE | clean_mode) << 16
            # Fixed: use a context manager so the source file handle is
            # closed promptly instead of leaking until garbage collection.
            with open(path, 'rb') as source:
                myzip.writestr(info, source.read())
        else:
            raise Exception()
    def add_version(self, pathlist):
        """Add a version to this archive."""
        if not(os.path.isdir(os.path.dirname(os.path.abspath(self.archive_path)))):
            os.makedirs(os.path.dirname(os.path.abspath(self.archive_path)))
        # Validate every path before touching the archive.
        for path in pathlist:
            if not(os.path.exists(path)):
                raise Exception()
        with self.compress_module.TempUncompress(self.archive_path) as zippath:
            new_version = self.get_version_count(zippath)
            with ZipFile(zippath, mode='a', allowZip64=True) as myzip:
                for path in pathlist:
                    self._add_path(path, new_version, myzip)
            tmprzip = mkstemppath()
            self.compress_module.compress(zippath, tmprzip)
            # perform sanity checks here
            shutil.move(tmprzip, self.archive_path)
    @staticmethod
    def parse_extra(info):
        """Parse a ZipInfo.extra blob into a {header_id: payload_bytes} dict."""
        pos = 0
        extra = {}
        while (pos < len(info.extra)):
            header, size = struct.unpack_from('<HH', info.extra, pos)
            pos += 4
            extra[header] = info.extra[pos:(pos + size)]
            pos += size
        return extra
    @staticmethod
    def parse_extended_mtime(info):
        """Return the unix mtime from the 0x5455 extra field, or None if absent."""
        extra = Archive.parse_extra(info)
        if 0x5455 in extra:
            flags, mtime = struct.unpack("<Bl", extra[0x5455])
            return mtime
        else:
            return None
    @staticmethod
    def _zip_versions(myzip):
        """Returns list of versions in the zip, where each version is a set
        of filename, crc tuples [[("a", crca1)], [("a", crca2), ("b", crcb2)]]".
        """
        retval = []
        for info in myzip.infolist():
            (version, p) = Archive._split_path(info)
            while len(retval) <= version:
                retval.append([])
            retval[version].append((p, info.CRC))
        return retval
    def has_version(self, path):
        """Return true if the archive already has a version that matchs path."""
        def mk_filename_set():
            # Directory names carry a trailing slash, matching _add_path.
            if os.path.isfile(path):
                return set([path])
            else:
                filenames = [path + "/"]
                for root, dirs, files in os.walk(path):
                    filenames += [ os.path.join(root, filename) for filename in files ]
                    filenames += [ os.path.join(root, d) + "/" for d in dirs ]
                return set(filenames)
        filename_set = mk_filename_set()
        filename_crc_set = None
        if (not(os.path.exists(self.archive_path))):
            return None
        else:
            with self.compress_module.TempUncompress(self.archive_path) as zippath:
                with ZipFile(zippath, mode='r', allowZip64=True) as myzip:
                    versions = Archive._zip_versions(myzip)
                    for (versionno, version) in enumerate(versions):
                        # first check the files without CRC
                        version_files = [ p[0] for p in version ]
                        if set(version_files) == filename_set:
                            # Compute local CRCs lazily, only on a name match.
                            if filename_crc_set is None:
                                filename_crc_set = set([ (f, Archive._crc32(f)) for f in filename_set ])
                            if set(version) == filename_crc_set:
                                return versionno
        return None
    @staticmethod
    def _crc32(filename):
        """CRC32 of a file's bytes, a symlink's target, or 0 for a directory."""
        if os.path.islink(filename):
            return binascii.crc32(os.readlink(filename).encode('utf-8')) & 0xffffffff
        elif os.path.isdir(filename):
            return 0
        elif os.path.isfile(filename):
            with open(filename, 'rb') as i:
                sofar = 0
                buff = i.read(16384)
                while (buff != b""):
                    sofar = (binascii.crc32(buff, sofar) & 0xffffffff)
                    buff = i.read(16384)
                return sofar
        else:
            raise Exception()
    @staticmethod
    def islink_entry(info):
        """Whether the entry's unix mode bits mark it as a symlink."""
        return ((info.external_attr >> 16) & Archive.ZIP_EXT_ATTR_LINK) == Archive.ZIP_EXT_ATTR_LINK
    @staticmethod
    def isdir_entry(info):
        """Whether the entry's unix mode bits mark it as a directory."""
        return ((info.external_attr >> 16) & Archive.ZIP_EXT_ATTR_DIR) == Archive.ZIP_EXT_ATTR_DIR
    def _extract_entry(self, myzip, info, destdir):
        """Extract one entry (file, dir or symlink), restoring mtime and mode."""
        dest = os.path.normpath(os.path.join(destdir, info.filename[9:])) # drop leading versions/
        parentdirs = os.path.dirname(dest)
        if parentdirs and not os.path.exists(parentdirs):
            os.makedirs(parentdirs)
        # Refuse to overwrite anything already on disk.
        if (os.path.exists(dest)):
            raise Exception()
        if Archive.islink_entry(info):
            i = myzip.open(info, 'r')
            target = i.read()
            i.close()
            os.symlink(target, dest)
        else:
            if (info.filename[-1] == '/' or Archive.isdir_entry(info)) and not(os.path.isdir(dest)):
                os.mkdir(dest)
            else:
                i = myzip.open(info, 'r')
                o = open(dest, "wb")
                shutil.copyfileobj(i, o)
                i.close()
                o.close()
        # parse extended datetime
        mtime = Archive.parse_extended_mtime(info)
        if mtime is not None:
            os.utime(dest, (mtime, mtime))
        # extract permissions
        os.chmod(dest, info.external_attr >> 16 & 0o007777)
    def extract(self, dest, number=None):
        """Extract a version."""
        archivename = os.path.basename(self.archive_path).split('.')[0]
        with self.compress_module.TempUncompress(self.archive_path) as zippath:
            with ZipFile(zippath, 'r') as myzip:
                for info in myzip.infolist():
                    # call _split_path for every entry to ensure they
                    # are name properly
                    versionno, name = Archive._split_path(info)
                    if number is None or (number == versionno):
                        self._extract_entry(myzip, info, os.path.join(dest, archivename))
    def list(self):
        """Return {version_number: [(path, ZipInfo), ...]} for all entries."""
        with self.compress_module.TempUncompress(self.archive_path) as zippath:
            with ZipFile(zippath, 'r') as myzip:
                retval = {}
                for info in myzip.infolist():
                    (version, path) = Archive._split_path(info)
                    if not(version in retval): retval[version] = []
                    retval[version].append((path, info))
                return retval
|
"""Endpoints for the UpSkilling tool.
See http://go/jobflix:reco-design
"""
import random
import string
import typing
from typing import Any, Dict, List, Optional, Sequence, Set
import flask
from bob_emploi.frontend.api import job_pb2
from bob_emploi.frontend.api import upskilling_pb2
from bob_emploi.frontend.api import user_pb2
from bob_emploi.frontend.server import i18n
from bob_emploi.frontend.server import jobs
from bob_emploi.frontend.server import mongo
from bob_emploi.frontend.server import proto_flask
from bob_emploi.frontend.server import scoring
app = flask.Blueprint('upskilling', __name__)
_MAX_SHOWN_SECTORS = 3
_HIDDEN_SECTOR_IDS = {
# Seasonal jobs.
'17035', 'seasonal',
# Low qualification jobs.
'17039', 'no-qualif',
}
def _create_random_seed() -> str:
return ''.join(random.choice(string.ascii_lowercase) for i in range(10))
class _InvalidState(ValueError):
...
class _ComputedSection(typing.NamedTuple):
    """Result of one section generator run."""

    # The jobs making up the section.
    jobs: List[upskilling_pb2.Job]
    # Replacement section ID, when the generator picked a dynamic one.
    new_id: Optional[str] = None
    # Replacement section name, when the generator picked a dynamic one.
    new_name: Optional[str] = None
    # Opaque state so that more jobs can be generated later.
    state: Optional[str] = None

    def __bool__(self) -> bool:
        """A section is truthy iff it contains at least one job."""
        return len(self.jobs) > 0
def _get_best_jobs_in_area(scoring_project: scoring.ScoringProject) -> job_pb2.BestJobsInArea:
    """Fetch the ranked best-jobs data for the project's département."""
    departement_id = scoring_project.details.city.departement_id
    hashable_db = mongo.HashableNoPiiMongoDatabase(scoring_project.database)
    return jobs.get_best_jobs_in_area(hashable_db, departement_id)
class _Generator:
    """Abstract base class for section generators.

    Subclasses provide a default section ``name`` and the two job-listing
    hooks below.
    """
    # How many jobs the first response for a section contains; the rest is
    # served later through get_more_jobs.
    _num_jobs_for_first_batch = 10
    @property
    def name(self) -> str:
        """Get the default name of the sections generated by this object."""
        raise NotImplementedError
    def get_jobs(
            self, *, scoring_project: scoring.ScoringProject, allowed_job_ids: Set[str],
            previous_sections: Set[str]) -> Optional[_ComputedSection]:
        """Generate a section."""
        raise NotImplementedError
    def get_more_jobs(
            self, *, scoring_project: scoring.ScoringProject, section_id: str,
            state: str) -> upskilling_pb2.Section:
        """Generate more jobs for a given section."""
        raise NotImplementedError
class _RandomGenerator(_Generator):
    """Section of jobs picked uniformly at random among all good job groups."""

    name = i18n.make_translatable_string('Des métiers au hasard')

    def get_jobs(
            self, *, allowed_job_ids: Set[str], **unused_kwargs: Any) -> Optional[_ComputedSection]:
        """Generate a section of random jobs, saving the seed in the state."""
        seed = _create_random_seed()
        randomizer = random.Random(seed)
        num_jobs = min(self._num_jobs_for_first_batch, len(allowed_job_ids))
        # Sort before sampling: random.sample on a set raises TypeError on
        # Python 3.11+, and a sorted list makes the sample reproducible from
        # the saved seed across processes (set iteration order is not).
        return _ComputedSection(
            [
                upskilling_pb2.Job(job_group=job_pb2.JobGroup(rome_id=rome_id))
                for rome_id in randomizer.sample(sorted(allowed_job_ids), num_jobs)
            ],
            state=seed)

    def get_more_jobs(
            self, *, scoring_project: scoring.ScoringProject, section_id: str,  # pylint: disable=unused-argument
            state: str) -> upskilling_pb2.Section:
        """Generate more jobs for a given section."""
        randomizer = random.Random(state)
        good_jobs = jobs.get_all_good_job_group_ids(scoring_project.database)
        num_jobs = min(30, len(good_jobs))
        # Same seed + same sorted ordering => same sample sequence as
        # get_jobs, so skipping the first batch avoids duplicates.
        return upskilling_pb2.Section(jobs=[
            upskilling_pb2.Job(job_group=job_pb2.JobGroup(rome_id=rome_id))
            for rome_id in randomizer.sample(sorted(good_jobs), num_jobs)[self._num_jobs_for_first_batch:]
        ])
class _BestJobsGenerator(_Generator):
    """Base generator for sections built from a pre-ranked list of jobs."""

    # This can be overriden in subclasses.
    def _create_job(self, related_job_group: job_pb2.RelatedJobGroup) -> upskilling_pb2.Job:
        """Convert one ranked job group into an upskilling Job proto."""
        return upskilling_pb2.Job(job_group=related_job_group.job_group)

    def _get_all_section_jobs(
            self, *, best_jobs: job_pb2.BestJobsInArea, scoring_project: scoring.ScoringProject) \
            -> Sequence[job_pb2.RelatedJobGroup]:
        """Select the ranked job list this section is based on."""
        raise NotImplementedError

    def get_jobs(
            self, *, scoring_project: scoring.ScoringProject, allowed_job_ids: Set[str],
            previous_sections: Set[str],  # pylint: disable=unused-argument
    ) -> Optional[_ComputedSection]:
        """Build a section from the ranked jobs, shuffled with a fresh seed."""
        ranked_jobs = self._get_all_section_jobs(
            best_jobs=_get_best_jobs_in_area(scoring_project),
            scoring_project=scoring_project)
        if not ranked_jobs:
            return None
        candidates = [
            candidate for candidate in ranked_jobs
            if candidate.job_group.rome_id in allowed_job_ids
        ]
        seed = _create_random_seed()
        random.Random(seed).shuffle(candidates)
        first_batch = candidates[:self._num_jobs_for_first_batch]
        return _ComputedSection(
            [self._create_job(candidate) for candidate in first_batch],
            state=seed,
        )

    def get_more_jobs(
            self, *, scoring_project: scoring.ScoringProject, section_id: str,  # pylint: disable=unused-argument
            state: str) -> upskilling_pb2.Section:
        """Generate more jobs for a given section."""
        ranked_jobs = self._get_all_section_jobs(
            best_jobs=_get_best_jobs_in_area(scoring_project),
            scoring_project=scoring_project)
        allowed = jobs.get_all_good_job_group_ids(scoring_project.database)
        candidates = [
            candidate for candidate in ranked_jobs
            if candidate.job_group.rome_id in allowed
        ]
        # Re-shuffling with the saved seed reproduces the get_jobs order, so
        # dropping the first batch continues where the section left off.
        random.Random(state).shuffle(candidates)
        return upskilling_pb2.Section(jobs=[
            self._create_job(candidate)
            for candidate in candidates[self._num_jobs_for_first_batch:]
        ])
class _BestLocalMarketScoreGenerator(_BestJobsGenerator):
    """Section of jobs with little local competition."""
    name = i18n.make_translatable_string('Des métiers avec peu de concurrence')
    def _get_all_section_jobs(
            self, *, best_jobs: job_pb2.BestJobsInArea, **unused_kwargs: Any) \
            -> Sequence[job_pb2.RelatedJobGroup]:
        # Use the list pre-ranked by local market score.
        return best_jobs.best_local_market_score_jobs
class _BestRelativeScoreJobsGenerator(_BestJobsGenerator):
    """Section of jobs that are hiring well relative to the area."""
    name = i18n.make_translatable_string('Des métiers qui recrutent bien')
    def _get_all_section_jobs(
            self, *, best_jobs: job_pb2.BestJobsInArea, **unused_kwargs: Any) \
            -> Sequence[job_pb2.RelatedJobGroup]:
        # Use the list pre-ranked by relative hiring score.
        return best_jobs.best_relative_score_jobs
class _BestSalariesGenerator(_BestJobsGenerator):
    """Section of well-paid jobs, showing the junior salary as a metric."""
    name = i18n.make_translatable_string('Des métiers avec un bon salaire')
    def _create_job(self, related_job_group: job_pb2.RelatedJobGroup) -> upskilling_pb2.Job:
        # Attach the junior salary so the UI can display it next to the job.
        return upskilling_pb2.Job(
            job_group=related_job_group.job_group,
            shown_metric=related_job_group.local_stats.imt.junior_salary.short_text,
        )
    def _get_all_section_jobs(
            self, *, best_jobs: job_pb2.BestJobsInArea, **unused_kwargs: Any) \
            -> Sequence[job_pb2.RelatedJobGroup]:
        # Use the list pre-ranked by salary.
        return best_jobs.best_salaries_jobs
class _BestSalariesLowQualifGenerator(_BestJobsGenerator):
    """Well-paid jobs reachable with at most a Bac+2 diploma."""

    name = i18n.make_translatable_string(
        'Des métiers avec un bon salaire accessibles avec un Bac+2 ou moins')

    # Highest diploma level still considered "low qualification".
    max_level: 'job_pb2.DegreeLevel.V' = job_pb2.BTS_DUT_DEUG

    def _create_job(self, related_job_group: job_pb2.RelatedJobGroup) -> upskilling_pb2.Job:
        """Attach the junior salary as the displayed metric."""
        return upskilling_pb2.Job(
            job_group=related_job_group.job_group,
            shown_metric=related_job_group.local_stats.imt.junior_salary.short_text,
        )

    def _has_low_qualif(
            self, scoring_project: scoring.ScoringProject, job: job_pb2.RelatedJobGroup) -> bool:
        """Whether the job group mostly does not require a diploma above max_level."""
        job_group = jobs.get_group_proto(scoring_project.database, job.job_group.rome_id)
        # Without diploma requirement data we cannot vouch for the job.
        if not job_group or not job_group.requirements.diplomas:
            return False
        percent_above_max = sum(
            diploma.percent_required for diploma in job_group.requirements.diplomas
            if diploma.diploma.level > self.max_level
        )
        return percent_above_max < 50

    def _get_all_section_jobs(
            self, *, best_jobs: job_pb2.BestJobsInArea, scoring_project: scoring.ScoringProject) \
            -> Sequence[job_pb2.RelatedJobGroup]:
        """Keep only the low-qualification jobs among the best-paid ones."""
        return [
            well_paid_job for well_paid_job in best_jobs.best_salaries_jobs
            if self._has_low_qualif(scoring_project, well_paid_job)
        ]
class _BestSalariesNoQualifGenerator(_BestSalariesLowQualifGenerator):
    """Well-paid jobs reachable with no diploma at all.

    Same filtering as the low-qualification generator, with the diploma
    ceiling lowered to "no degree".
    """
    name = i18n.make_translatable_string('Des métiers avec un bon salaire accessibles sans diplôme')
    max_level = job_pb2.NO_DEGREE
class _RandomSectorGenerator(_Generator):
    """Section of low-competition jobs from one randomly chosen sector."""

    # The actual name comes from the sector description at generation time.
    name = ''

    def _get_jobs_for_sector(
            self, sector: job_pb2.SectorBestJobGroups, random_seed: str,
            allowed_job_ids: Set[str]) -> List[job_pb2.RelatedJobGroup]:
        """Shuffle the sector's low-competition jobs restricted to allowed IDs."""
        candidates = [
            sector_job for sector_job in sector.best_local_market_score_jobs
            if sector_job.job_group.rome_id in allowed_job_ids
        ]
        random.Random(random_seed).shuffle(candidates)
        return candidates

    def get_jobs(
            self, *, scoring_project: scoring.ScoringProject,
            allowed_job_ids: Set[str], previous_sections: Set[str]) \
            -> Optional[_ComputedSection]:
        """Pick a sector not shown yet and build a section from it."""
        already_shown = {
            section_id[len('sector-'):]
            for section_id in previous_sections
            if section_id.startswith('sector-')
        }
        # Cap the number of sector sections per response.
        if len(already_shown) >= _MAX_SHOWN_SECTORS:
            return None
        sectors = _get_best_jobs_in_area(scoring_project).sectors[:]
        random.shuffle(sectors)
        for sector in sectors:
            if sector.sector_id in already_shown or sector.sector_id in _HIDDEN_SECTOR_IDS:
                continue
            random_seed = _create_random_seed()
            sector_jobs = self._get_jobs_for_sector(sector, random_seed, allowed_job_ids)
            if not sector_jobs:
                continue
            return _ComputedSection(
                [
                    upskilling_pb2.Job(job_group=sector_job.job_group)
                    for sector_job in sector_jobs[:self._num_jobs_for_first_batch]
                ],
                new_id=f'sector-{sector.sector_id}',
                new_name=sector.description,
                state=random_seed)
        # All sectors have already been selected.
        return None

    def get_more_jobs(
            self, *, scoring_project: scoring.ScoringProject, section_id: str,
            state: str) -> upskilling_pb2.Section:
        """Generate more jobs for a given section."""
        sector_id = section_id.replace('sector-', '')
        matching_sectors = (
            sector for sector in _get_best_jobs_in_area(scoring_project).sectors
            if sector.sector_id == sector_id)
        sector = next(matching_sectors, None)
        if sector is None:
            # Cannot find sector data at all.
            return upskilling_pb2.Section()
        good_jobs = jobs.get_all_good_job_group_ids(scoring_project.database)
        sector_jobs = self._get_jobs_for_sector(sector, state, good_jobs)
        return upskilling_pb2.Section(jobs=[
            upskilling_pb2.Job(job_group=sector_job.job_group)
            for sector_job in sector_jobs[self._num_jobs_for_first_batch:]
        ])
# Registry of all section generators, keyed by generator ID (the ID is also
# the first component of each section's state string).
_SECTION_GENERATORS: Dict[str, '_Generator'] = {
    'best-relative-local-score': _BestRelativeScoreJobsGenerator(),
    'best-local-market-score': _BestLocalMarketScoreGenerator(),
    'best-salaries': _BestSalariesGenerator(),
    'random-sector': _RandomSectorGenerator(),
    'best-salaries-no-qualifications': _BestSalariesNoQualifGenerator(),
    'best-salaries-low-qualifications': _BestSalariesLowQualifGenerator(),
    'serendipity': _RandomGenerator(),
}
# Ordered slots for the /sections response. 'random-sector' appears several
# times on purpose: each occurrence picks a different, not-yet-shown sector.
_SECTION_SLOTS = [
    'best-relative-local-score',
    'best-local-market-score',
    'best-salaries',
    'random-sector',
    'best-salaries-no-qualifications',
    'best-salaries-low-qualifications',
    'random-sector',
    'serendipity',
    'random-sector',
]
@app.route('/sections', methods=['POST'])
@proto_flask.api(in_type=user_pb2.User, out_type=upskilling_pb2.Sections)
def get_sections_for_project(user_proto: user_pb2.User) -> upskilling_pb2.Sections:
    """Return all the sections to browse."""
    if not user_proto.projects:
        flask.abort(422, i18n.flask_translate("Il n'y a pas de projet à explorer."))
    project = user_proto.projects[0]
    database = mongo.get_connections_from_env().stats_db
    scoring_project = scoring.ScoringProject(project, user_proto, database)
    result = upskilling_pb2.Sections()
    good_jobs = jobs.get_all_good_job_group_ids(scoring_project.database)
    for generator_id in _SECTION_SLOTS:
        generator = _SECTION_GENERATORS[generator_id]
        # IDs of sections already produced by this same generator, so a
        # repeated slot (e.g. 'random-sector') does not repeat itself.
        seen_section_ids = {
            section.id for section in result.sections
            if section.state.startswith(f'{generator_id}:')
        }
        computed_section = generator.get_jobs(
            scoring_project=scoring_project, allowed_job_ids=good_jobs,
            previous_sections=seen_section_ids)
        # Drop empty or single-job sections.
        if not computed_section or len(computed_section.jobs) < 2:
            continue
        translated_name = scoring_project.translate_static_string(
            computed_section.new_name or generator.name)
        result.sections.add(
            id=computed_section.new_id or generator_id,
            state=f'{generator_id}:{computed_section.state or ""}',
            name=scoring_project.populate_template(translated_name),
            jobs=computed_section.jobs,
        )
    return result
@app.route('/sections/<section_id>/jobs/<state>', methods=['POST'])
@proto_flask.api(in_type=user_pb2.User, out_type=upskilling_pb2.Section)
def get_more_jobs(
        user_proto: user_pb2.User, *, section_id: str, state: str) -> upskilling_pb2.Section:
    """Return more jobs for a given section."""
    if not user_proto.projects:
        flask.abort(422, i18n.flask_translate("Il n'y a pas de projet à explorer."))
    # The state is "<generator_id>:<generator-specific state>".
    try:
        generator_id, section_state = state.split(':', 1)
    except ValueError:
        flask.abort(
            422,
            i18n.flask_translate("Le paramètre d'état {state} n'a pas le bon format.")
            .format(state=state))
    project = user_proto.projects[0]
    database = mongo.get_connections_from_env().stats_db
    scoring_project = scoring.ScoringProject(project, user_proto, database)
    if generator_id not in _SECTION_GENERATORS:
        flask.abort(
            404,
            i18n.flask_translate('Générateur de section inconnu: {generator_id}')
            .format(generator_id=generator_id))
    generator = _SECTION_GENERATORS[generator_id]
    try:
        return generator.get_more_jobs(
            scoring_project=scoring_project, section_id=section_id, state=section_state)
    except _InvalidState:
        flask.abort(
            422,
            i18n.flask_translate('Impossible de commencer à {start_from}')
            .format(start_from=section_state))
|
from functools import wraps
from flask import request, redirect, url_for, session, flash
def login_required(f):
    """Decorator that redirects anonymous users to the login page.

    The original URL is forwarded as the ``next`` query parameter so the
    login view can send the user back after authentication.
    """
    @wraps(f)
    def decorated_function(*args, **kwargs):
        # Idiomatic membership test ('x not in y' instead of 'not x in y').
        if 'username' not in session:
            return redirect(url_for('login', next=request.url))
        return f(*args, **kwargs)
    return decorated_function
|
from __future__ import division
import numpy as np
import matplotlib.pyplot as plt
import sys
sys.path.append("../../../")
from Util import DilutionUtil
from PCR.Optimization.PCR_Opt_Analysis import PCR_Analyze_Objects
class Gradient:
    """One PCR temperature-gradient experiment and its measured yields."""

    # Thermocycler identifiers.
    MACHINE_TOUCHGENE = 0
    MACHINE_BIORAD = 1

    def __init__(self, Temperatures, ConcentrationYields, ConcentrationVolumes,
                 NumRepeats, Description, Machine, Date, NumVialsCombined=1):
        self.Temperatures = np.array(Temperatures)
        self.Concentrations = np.array(ConcentrationYields)
        self.Volumes = np.array(ConcentrationVolumes)
        self.Repeats = NumRepeats
        self.Description = Description
        self.Machine = Machine
        self.Date = Date
        self.NumVialsCombined = NumVialsCombined

    def GetMachineName(self):
        """Human-readable name of the thermocycler used."""
        if self.Machine == Gradient.MACHINE_TOUCHGENE:
            return "TouchGene"
        return "BioRad"

    def GetYieldPer100uLTube(self):
        """Total yield (volume * concentration), normalized per pooled vial."""
        total_yield = self.Volumes * self.Concentrations
        return total_yield / self.NumVialsCombined
def run():
    """
    Build the full list of recorded gradient experiments and analyze them.

    Data taken from notebook #2, pp 14 (and later dated entries). Each
    Gradient records the annealing temperatures tried and the measured
    concentration yields, then everything is passed to PCR_Analyze_Objects.
    """
    GradientsObjs = \
        [Gradient(Temperatures=[60.2,62.5,64.2,65.8],
                  ConcentrationYields=[76,62,40,10],
                  ConcentrationVolumes=35,
                  NumRepeats=30,
                  Description="Notebook#2, pp14",
                  Machine=Gradient.MACHINE_TOUCHGENE,
                  Date="???"),
         Gradient(Temperatures=[60.2,62.5,64.2,65.8],
                  ConcentrationYields=[110,100,70,30],
                  ConcentrationVolumes=35,
                  NumRepeats=35,
                  Description="Notebook#2, pp14",
                  Machine=Gradient.MACHINE_TOUCHGENE,
                  Date="???"),
         ## 5/17/2016 data
         # 35R
         Gradient(Temperatures=[60,61.4,62.3,64],
                  ConcentrationYields=[104.1,95.1,96.7,75.7],
                  ConcentrationVolumes=35,
                  NumRepeats=35,
                  Description="",
                  Machine=Gradient.MACHINE_TOUCHGENE,
                  Date="5/17/2016"),
         #40R
         Gradient(Temperatures=[60,61.4,62.3,64],
                  ConcentrationYields=[146.6,149.3,147.4,106.1],
                  ConcentrationVolumes=35,
                  NumRepeats=40,
                  Description="",
                  Machine=Gradient.MACHINE_TOUCHGENE,
                  Date="5/17/2016"),
         # 5/23 data, only one temperature and pooled two vials
         Gradient(Temperatures=[61.4],
                  ConcentrationYields=[464],
                  ConcentrationVolumes=35,
                  NumRepeats=40,
                  Description="",
                  Machine=Gradient.MACHINE_TOUCHGENE,
                  Date="5/23/2016",
                  NumVialsCombined=2),
         ## 5/24 data, ibid
         Gradient(Temperatures=[61.4],
                  ConcentrationYields=[350],
                  ConcentrationVolumes=35,
                  NumRepeats=40,
                  Description="",
                  Machine=Gradient.MACHINE_TOUCHGENE,
                  Date="5/24/2016",
                  NumVialsCombined=2),
         ## 5/26 data on the biorad
         Gradient(Temperatures=[60,61.3,62.5,64],
                  ConcentrationYields=[155.2,86.7,41.5,50],
                  ConcentrationVolumes=35,
                  NumRepeats=35,
                  Description="",
                  Machine=Gradient.MACHINE_BIORAD,
                  Date="5/26/2016"),
         Gradient(Temperatures=[60,61.3,62.5,64],
                  ConcentrationYields=[172,137,127,62.6],
                  ConcentrationVolumes=35,
                  NumRepeats=40,
                  Description="",
                  Machine=Gradient.MACHINE_BIORAD,
                  Date="5/26/2016"),
         Gradient(Temperatures=[60,61.3],
                  ConcentrationYields=[55,44],
                  ConcentrationVolumes=35,
                  NumRepeats=40,
                  Description="Gradient of ovh-2.0,labelled. T_ann too high?",
                  Machine=Gradient.MACHINE_BIORAD,
                  Date="6/2/2016"),
         Gradient(Temperatures=[58,60.5,62],
                  ConcentrationYields=[95,91,91],
                  ConcentrationVolumes=35,
                  NumRepeats=40,
                  Description="Gradient of ovh-2.0,labelled, with 45s ext",
                  Machine=Gradient.MACHINE_BIORAD,
                  Date="6/3/2016"),
         Gradient(Temperatures=[58,60.5,62],
                  ConcentrationYields=[145,105,121],
                  ConcentrationVolumes=35,
                  NumRepeats=40,
                  Description="Gradient of ovh-2.0,spacer, with 45s ext",
                  Machine=Gradient.MACHINE_BIORAD,
                  Date="6/3/2016"),
         Gradient(Temperatures=[60],
                  ConcentrationYields=[91.5],
                  ConcentrationVolumes=70*4, # 8 tubes into 4, diluted 2-fold
                  NumRepeats=40,
                  Description="Gradient of ovh-2.0,spacer, with 45s ext",
                  Machine=Gradient.MACHINE_BIORAD,
                  Date="6/6/2016")
        ]
    PCR_Analyze_Objects(GradientsObjs,"Ovh2.0-Spacer")
if __name__ == "__main__":
    run()
|
from gi.repository import Gtk
from debugger.debugger_api import StartupInfo
from gui.dialog import FileOpenDialog
from gui.gui_util import require_gui_thread
class EnvVarWidget(Gtk.Box):
    """Horizontal box with a name entry and a value entry for one env var."""

    def __init__(self, *args, **kwargs):
        Gtk.Box.__init__(self, *args, **kwargs)
        self.set_orientation(Gtk.Orientation.HORIZONTAL)
        self.entry_name = Gtk.Entry()
        self.entry_value = Gtk.Entry()
        for entry in (self.entry_name, self.entry_value):
            self.pack_start(entry, False, False, 0)

    @property
    def name(self):
        """Current text of the variable-name entry."""
        return self.entry_name.get_text()

    @property
    def value(self):
        """Current text of the variable-value entry."""
        return self.entry_value.get_text()
class StartupDialog(object):
    """Modal dialog for editing process startup info (cwd, args, env vars)."""
    @require_gui_thread
    def __init__(self, dialog_builder):
        """
        @type dialog_builder: Gtk.Builder
        """
        signals = {
            "working-directory-choose":
                lambda *x: self._show_choose_dir_dialog(),
            "startup-cancel": lambda *x: self._cancel_dialog(),
            "startup-confirm": lambda *x: self._confirm_dialog(),
            "env-var-add": lambda *x: self._add_env_var()
        }
        self.dialog_builder = dialog_builder
        self.dialog_builder.connect_signals(signals)
        self.dialog = dialog_builder.get_object("startup_dialog")
        self.grid = self.dialog_builder.get_object("env_var_grid")
        # One (remove_button, name_entry, value_entry) tuple per env var row;
        # grid row index = list index + 1 (row 0 is the header).
        self.env_line_mapping = []
        self.working_dir_entry = self.dialog_builder.get_object(
            "working_directory")
        self.cmd_arguments_entry = self.dialog_builder.get_object(
            "cmd_arguments")
    @require_gui_thread
    def show(self, startup_info):
        """
        Run the dialog modally; returns the edited info on confirm, the
        unchanged input on cancel/close.
        @type startup_info: debugger.debugger_api.StartupInfo
        @rtype: debugger.debugger_api.StartupInfo
        """
        self._set_startup_info(startup_info)
        response = self.dialog.run()
        self.dialog.hide()
        if response != Gtk.ResponseType.YES:
            return startup_info
        else:
            return self._construct_startup_info()
    @require_gui_thread
    def _set_startup_info(self, startup_info):
        """
        Populate the dialog widgets from the given startup info.
        @type startup_info: debugger.debugger_api.StartupInfo
        """
        # Removing row 1 once per existing mapping drops every env var row
        # (rows shift up after each removal; row 0 is the header).
        for mapping in self.env_line_mapping:
            self.grid.remove_row(1)
        self.env_line_mapping = []
        self.working_dir_entry.set_text(startup_info.working_directory)
        self.cmd_arguments_entry.set_text(startup_info.cmd_arguments)
        for env in startup_info.env_vars:
            self._add_env_var(env[0], env[1])
    @require_gui_thread
    def _construct_startup_info(self):
        """
        Build a StartupInfo from the current widget contents; rows with an
        empty name are skipped.
        @rtype: debugger.debugger_api.StartupInfo
        """
        startup_info = StartupInfo()
        startup_info.working_directory = self.working_dir_entry.get_text()
        startup_info.cmd_arguments = self.cmd_arguments_entry.get_text()
        for mapping in self.env_line_mapping:
            name = mapping[1].get_text()
            value = mapping[2].get_text()
            if name != "":
                startup_info.env_vars.append((name, value))
        return startup_info
    @require_gui_thread
    def _show_choose_dir_dialog(self):
        # Let the user pick a working directory, starting from the current one.
        current_folder = self.working_dir_entry.get_text()
        folder = FileOpenDialog.select_folder("Choose working directory",
                                              self.dialog,
                                              current_folder)
        if folder:
            self.dialog_builder.get_object("working_directory").set_text(
                folder
            )
    @require_gui_thread
    def _add_env_var(self, name="", value=""):
        """
        Append one editable env var row (name, value, remove button) to the grid.
        @type name: str
        @type value: str
        """
        name_entry = Gtk.Entry()
        name_entry.set_text(name)
        name_entry.set_tooltip_text("Environment variable name")
        name_entry.show_all()
        value_entry = Gtk.Entry()
        value_entry.set_text(value)
        value_entry.set_tooltip_text("Environment variable value")
        value_entry.show_all()
        remove_button = Gtk.Button()
        remove_button.set_image(Gtk.Image().new_from_stock(
            Gtk.STOCK_REMOVE, Gtk.IconSize.BUTTON))
        remove_button.connect("clicked", lambda *x: self._remove_env_row(x[0]))
        remove_button.set_hexpand(False)
        remove_button.set_halign(Gtk.Align.CENTER)
        remove_button.set_tooltip_text("Remove environment variable")
        remove_button.show_all()
        row = len(self.env_line_mapping) + 1  # + 1 because of header
        self.grid.attach(name_entry, 0, row, 1, 1)
        self.grid.attach(value_entry, 1, row, 1, 1)
        self.grid.attach(remove_button, 2, row, 1, 1)
        self.env_line_mapping.append((remove_button, name_entry, value_entry))
    @require_gui_thread
    def _remove_env_row(self, remove_button):
        """
        Remove the env var row owned by the clicked remove button.
        @type remove_button: Gtk.Button
        """
        row = 0
        # Find which mapping owns this button.
        for i, mapping in enumerate(self.env_line_mapping):
            if mapping[0] == remove_button:
                row = i
                break
        self.grid.remove_row(row + 1)  # + 1 because of header
        del self.env_line_mapping[row]
    @require_gui_thread
    def _cancel_dialog(self):
        # End the modal loop with CANCEL; show() then returns the input unchanged.
        self.dialog.response(Gtk.ResponseType.CANCEL)
    @require_gui_thread
    def _confirm_dialog(self):
        # End the modal loop with YES; show() then builds a new StartupInfo.
        self.dialog.response(Gtk.ResponseType.YES)
|
"""
Turtle graphics is a popular way for introducing programming to
kids. It was part of the original Logo programming language developed
by Wally Feurzig and Seymour Papert in 1966.
Imagine a robotic turtle starting at (0, 0) in the x-y plane. After an ``import turtle``, give it
the command turtle.forward(15), and it moves (on-screen!) 15 pixels in
the direction it is facing, drawing a line as it moves. Give it the
command turtle.right(25), and it rotates in-place 25 degrees clockwise.
By combining together these and similar commands, intricate shapes and
pictures can easily be drawn.
----- turtle.py
This module is an extended reimplementation of turtle.py from the
Python standard distribution up to Python 2.5. (See: http://www.python.org)
It tries to keep the merits of turtle.py and to be (nearly) 100%
compatible with it. This means in the first place to enable the
learning programmer to use all the commands, classes and methods
interactively when using the module from within IDLE run with
the -n switch.
Roughly it has the following features added:
- Better animation of the turtle movements, especially of turning the
turtle. So the turtles can more easily be used as a visual feedback
instrument by the (beginning) programmer.
- Different turtle shapes, gif-images as turtle shapes, user defined
and user controllable turtle shapes, among them compound
(multicolored) shapes. Turtle shapes can be stretched and tilted, which
makes turtles very versatile geometrical objects.
- Fine control over turtle movement and screen updates via delay(),
and enhanced tracer() and speed() methods.
- Aliases for the most commonly used commands, like fd for forward etc.,
following the early Logo traditions. This reduces the boring work of
typing long sequences of commands, which often occur in a natural way
when kids try to program fancy pictures on their first encounter with
turtle graphics.
- Turtles now have an undo()-method with configurable undo-buffer.
- Some simple commands/methods for creating event driven programs
(mouse-, key-, timer-events). Especially useful for programming games.
- A scrollable Canvas class. The default scrollable Canvas can be
extended interactively as needed while playing around with the turtle(s).
- A TurtleScreen class with methods controlling background color or
background image, window and canvas size and other properties of the
TurtleScreen.
- There is a method, setworldcoordinates(), to install a user defined
coordinate-system for the TurtleScreen.
- The implementation uses a 2-vector class named Vec2D, derived from tuple.
This class is public, so it can be imported by the application programmer,
which makes certain types of computations very natural and compact.
- Appearance of the TurtleScreen and the Turtles at startup/import can be
configured by means of a turtle.cfg configuration file.
The default configuration mimics the appearance of the old turtle module.
- If configured appropriately the module reads in docstrings from a docstring
dictionary in some different language, supplied separately and replaces
the English ones by those read in. There is a utility function
write_docstringdict() to write a dictionary with the original (English)
docstrings to disc, so it can serve as a template for translations.
Behind the scenes there are some features included with possible
extensions in mind. These will be commented and documented elsewhere.
"""
_ver = "turtle 1.1b- - for Python 3.1 - 4. 5. 2009"
import tkinter as TK
import types
import math
import time
import inspect
import sys
from os.path import isfile, split, join
from copy import deepcopy
from tkinter import simpledialog
_tg_classes = ['ScrolledCanvas', 'TurtleScreen', 'Screen',
'RawTurtle', 'Turtle', 'RawPen', 'Pen', 'Shape', 'Vec2D']
_tg_screen_functions = ['addshape', 'bgcolor', 'bgpic', 'bye',
'clearscreen', 'colormode', 'delay', 'exitonclick', 'getcanvas',
'getshapes', 'listen', 'mainloop', 'mode', 'numinput',
'onkey', 'onkeypress', 'onkeyrelease', 'onscreenclick', 'ontimer',
'register_shape', 'resetscreen', 'screensize', 'setup',
'setworldcoordinates', 'textinput', 'title', 'tracer', 'turtles', 'update',
'window_height', 'window_width']
_tg_turtle_functions = ['back', 'backward', 'begin_fill', 'begin_poly', 'bk',
'circle', 'clear', 'clearstamp', 'clearstamps', 'clone', 'color',
'degrees', 'distance', 'dot', 'down', 'end_fill', 'end_poly', 'fd',
'fillcolor', 'filling', 'forward', 'get_poly', 'getpen', 'getscreen', 'get_shapepoly',
'getturtle', 'goto', 'heading', 'hideturtle', 'home', 'ht', 'isdown',
'isvisible', 'left', 'lt', 'onclick', 'ondrag', 'onrelease', 'pd',
'pen', 'pencolor', 'pendown', 'pensize', 'penup', 'pos', 'position',
'pu', 'radians', 'right', 'reset', 'resizemode', 'rt',
'seth', 'setheading', 'setpos', 'setposition', 'settiltangle',
'setundobuffer', 'setx', 'sety', 'shape', 'shapesize', 'shapetransform', 'shearfactor', 'showturtle',
'speed', 'st', 'stamp', 'tilt', 'tiltangle', 'towards',
'turtlesize', 'undo', 'undobufferentries', 'up', 'width',
'write', 'xcor', 'ycor']
_tg_utilities = ['write_docstringdict', 'done']
__all__ = (_tg_classes + _tg_screen_functions + _tg_turtle_functions +
_tg_utilities + ['Terminator']) # + _math_functions)
_alias_list = ['addshape', 'backward', 'bk', 'fd', 'ht', 'lt', 'pd', 'pos',
'pu', 'rt', 'seth', 'setpos', 'setposition', 'st',
'turtlesize', 'up', 'width']
_CFG = {"width" : 0.5, # Screen
"height" : 0.75,
"canvwidth" : 400,
"canvheight": 300,
"leftright": None,
"topbottom": None,
"mode": "standard", # TurtleScreen
"colormode": 1.0,
"delay": 10,
"undobuffersize": 1000, # RawTurtle
"shape": "classic",
"pencolor" : "black",
"fillcolor" : "black",
"resizemode" : "noresize",
"visible" : True,
"language": "english", # docstrings
"exampleturtle": "turtle",
"examplescreen": "screen",
"title": "Python Turtle Graphics",
"using_IDLE": False
}
def config_dict(filename):
    """Convert content of config-file into dictionary.

    Each non-empty, non-comment line must have the form ``key = value``.
    Values spelled True/False/None/''/"" become the corresponding Python
    object; otherwise an int or float conversion is attempted and the
    value is kept as a string if that fails.  Malformed lines are
    reported and skipped.
    """
    with open(filename, "r") as f:
        cfglines = f.readlines()
    cfgdict = {}
    # Explicit literal table instead of eval(): never execute text that
    # comes from a config file.
    literals = {"True": True, "False": False, "None": None,
                "''": "", '""': ""}
    for line in cfglines:
        line = line.strip()
        if not line or line.startswith("#"):
            continue
        try:
            # maxsplit=1 so values may themselves contain '=' characters
            key, value = line.split("=", 1)
        except ValueError:
            print("Bad line in config-file %s:\n%s" % (filename,line))
            continue
        key = key.strip()
        value = value.strip()
        if value in literals:
            value = literals[value]
        else:
            try:
                value = float(value) if "." in value else int(value)
            except ValueError:
                pass # value need not be converted
        cfgdict[key] = value
    return cfgdict
def readconfig(cfgdict):
    """Read config-files and update the global configuration dict _CFG.

    A turtle.cfg in the current working directory is read first.  If it
    contains an ``importconfig`` value, say 'myway', the file looked up
    in the module directory is turtle_myway.cfg instead of turtle.cfg.
    The module-directory settings are applied first and then overridden
    by the working-directory settings, so the latter win.  With no
    config-file present, the default configuration stays in effect.

    NOTE(review): the *cfgdict* parameter is accepted but never used --
    this function always updates the module-global _CFG.
    """
    cfg_name = "turtle.cfg"
    cwd_settings = config_dict(cfg_name) if isfile(cfg_name) else {}
    if "importconfig" in cwd_settings:
        cfg_name = "turtle_%s.cfg" % cwd_settings["importconfig"]
    try:
        module_dir, _ = split(__file__)
        module_cfg = join(module_dir, cfg_name)
    except Exception:
        module_cfg = ""
    module_settings = config_dict(module_cfg) if isfile(module_cfg) else {}
    _CFG.update(module_settings)
    _CFG.update(cwd_settings)
# Load the user configuration at import time; on any failure the
# built-in defaults in _CFG remain in effect.
try:
    readconfig(_CFG)
except Exception:
    print ("No configfile read, reason unknown")
class Vec2D(tuple):
    """An immutable two-dimensional vector, helper for turtle graphics.

    May be useful for turtle graphics programs also.
    Because it derives from tuple, every vector is a tuple!

    Provides (for vectors a, b and a number k):
         a+b               vector addition
         a-b               vector subtraction
         a*b               inner product
         k*a and a*k       multiplication with scalar
         |a|               absolute value of a
         a.rotate(angle)   rotation
    """
    def __new__(cls, x, y):
        return tuple.__new__(cls, (x, y))
    def __add__(self, other):
        ox, oy = other[0], other[1]
        return Vec2D(self[0] + ox, self[1] + oy)
    def __mul__(self, other):
        if isinstance(other, Vec2D):
            # vector * vector: inner (dot) product
            return self[0]*other[0] + self[1]*other[1]
        # vector * scalar
        return Vec2D(self[0]*other, self[1]*other)
    def __rmul__(self, other):
        # only scalar * vector is supported; any other operand type
        # falls through and yields None (kept for compatibility)
        if isinstance(other, (int, float)):
            return Vec2D(self[0]*other, self[1]*other)
    def __sub__(self, other):
        ox, oy = other[0], other[1]
        return Vec2D(self[0] - ox, self[1] - oy)
    def __neg__(self):
        return Vec2D(-self[0], -self[1])
    def __abs__(self):
        # Euclidean length
        return (self[0]**2 + self[1]**2)**0.5
    def rotate(self, angle):
        """Return self rotated counterclockwise by angle (degrees)."""
        angle = angle * math.pi / 180.0
        c, s = math.cos(angle), math.sin(angle)
        # (x, y) rotated = (x*c - y*s, y*c + x*s)
        return Vec2D(self[0]*c + (-self[1])*s, self[1]*c + self[0]*s)
    def __getnewargs__(self):
        # support copy/pickle of the tuple subclass
        return (self[0], self[1])
    def __repr__(self):
        return "(%.2f,%.2f)" % self
def __methodDict(cls, _dict):
    """helper function for Scrolled Canvas: collect methods of cls.

    Base classes are processed first (in reverse declaration order) so
    that functions defined closer to *cls* overwrite inherited entries
    in _dict.
    """
    for base in reversed(cls.__bases__):
        __methodDict(base, _dict)
    for name, attr in cls.__dict__.items():
        if type(attr) == types.FunctionType:
            _dict[name] = attr
def __methods(cls):
    """helper function for Scrolled Canvas: names of all methods of cls."""
    collected = {}
    __methodDict(cls, collected)
    return collected.keys()
# Source template used by __forwardmethods below: defines a method that
# simply delegates its call to the like-named method of self.<attribute>.
__stringBody = (
    'def %(method)s(self, *args, **kw): return ' +
    'self.%(attribute)s.%(method)s(*args, **kw)')
def __forwardmethods(fromClass, toClass, toPart, exclude = ()):
    ### MANY CHANGES ###
    # Attach forwarding methods to fromClass for every public method of
    # toClass, delegating to the attribute named by toPart.
    _dict_1 = {}
    __methodDict(toClass, _dict_1)
    _dict = {}
    mfc = __methods(fromClass)
    # Skip names that start or end with '_', excluded names, and
    # anything fromClass already defines itself.
    for ex in _dict_1.keys():
        if ex[:1] == '_' or ex[-1:] == '_' or ex in exclude or ex in mfc:
            pass
        else:
            _dict[ex] = _dict_1[ex]
    for method, func in _dict.items():
        d = {'method': method, 'func': func}
        if isinstance(toPart, str):
            # build the delegating function from the __stringBody template;
            # exec() places it into d under the method's name
            execString = \
                __stringBody % {'method' : method, 'attribute' : toPart}
            exec(execString, d)
        setattr(fromClass, method, d[method]) ### NEWU!
class ScrolledCanvas(TK.Frame):
    """Modeled after the scrolled canvas class from Grayson's Tkinter book.
    Used as the default canvas, which pops up automatically when
    using turtle graphics functions or the Turtle class.
    """
    def __init__(self, master, width=500, height=350,
                                          canvwidth=600, canvheight=500):
        # width/height: visible window size; canvwidth/canvheight: size
        # of the scrollable drawing area behind it.
        TK.Frame.__init__(self, master, width=width, height=height)
        self._rootwindow = self.winfo_toplevel()
        self.width, self.height = width, height
        self.canvwidth, self.canvheight = canvwidth, canvheight
        self.bg = "white"
        self._canvas = TK.Canvas(master, width=width, height=height,
                                 bg=self.bg, relief=TK.SUNKEN, borderwidth=2)
        self.hscroll = TK.Scrollbar(master, command=self._canvas.xview,
                                    orient=TK.HORIZONTAL)
        self.vscroll = TK.Scrollbar(master, command=self._canvas.yview)
        self._canvas.configure(xscrollcommand=self.hscroll.set,
                               yscrollcommand=self.vscroll.set)
        self.rowconfigure(0, weight=1, minsize=0)
        self.columnconfigure(0, weight=1, minsize=0)
        self._canvas.grid(padx=1, in_ = self, pady=1, row=0,
                column=0, rowspan=1, columnspan=1, sticky='news')
        self.vscroll.grid(padx=1, in_ = self, pady=1, row=0,
                column=1, rowspan=1, columnspan=1, sticky='news')
        self.hscroll.grid(padx=1, in_ = self, pady=1, row=1,
                column=0, rowspan=1, columnspan=1, sticky='news')
        self.reset()
        self._rootwindow.bind('<Configure>', self.onResize)
    def reset(self, canvwidth=None, canvheight=None, bg = None):
        """Adjust canvas and scrollbars according to given canvas size."""
        if canvwidth:
            self.canvwidth = canvwidth
        if canvheight:
            self.canvheight = canvheight
        if bg:
            self.bg = bg
        # NOTE(review): bg may still be None here; presumably Tkinter
        # ignores None-valued options -- confirm.
        self._canvas.config(bg=bg,
                        scrollregion=(-self.canvwidth//2, -self.canvheight//2,
                                       self.canvwidth//2, self.canvheight//2))
        # center the visible window on the scrollregion
        self._canvas.xview_moveto(0.5*(self.canvwidth - self.width + 30) /
                                                               self.canvwidth)
        self._canvas.yview_moveto(0.5*(self.canvheight- self.height + 30) /
                                                              self.canvheight)
        self.adjustScrolls()
    def adjustScrolls(self):
        """ Adjust scrollbars according to window- and canvas-size.
        """
        cwidth = self._canvas.winfo_width()
        cheight = self._canvas.winfo_height()
        self._canvas.xview_moveto(0.5*(self.canvwidth-cwidth)/self.canvwidth)
        self._canvas.yview_moveto(0.5*(self.canvheight-cheight)/self.canvheight)
        # show scrollbars only when the canvas is larger than the window
        if cwidth < self.canvwidth or cheight < self.canvheight:
            self.hscroll.grid(padx=1, in_ = self, pady=1, row=1,
                              column=0, rowspan=1, columnspan=1, sticky='news')
            self.vscroll.grid(padx=1, in_ = self, pady=1, row=0,
                              column=1, rowspan=1, columnspan=1, sticky='news')
        else:
            self.hscroll.grid_forget()
            self.vscroll.grid_forget()
    def onResize(self, event):
        """self-explanatory"""
        self.adjustScrolls()
    def bbox(self, *args):
        """ 'forward' method, which canvas itself has inherited...
        """
        return self._canvas.bbox(*args)
    def cget(self, *args, **kwargs):
        """ 'forward' method, which canvas itself has inherited...
        """
        return self._canvas.cget(*args, **kwargs)
    def config(self, *args, **kwargs):
        """ 'forward' method, which canvas itself has inherited...
        """
        self._canvas.config(*args, **kwargs)
    def bind(self, *args, **kwargs):
        """ 'forward' method, which canvas itself has inherited...
        """
        self._canvas.bind(*args, **kwargs)
    def unbind(self, *args, **kwargs):
        """ 'forward' method, which canvas itself has inherited...
        """
        self._canvas.unbind(*args, **kwargs)
    def focus_force(self):
        """ 'forward' method, which canvas itself has inherited...
        """
        self._canvas.focus_force()
# Give ScrolledCanvas all remaining public TK.Canvas methods by
# forwarding them to its internal _canvas widget.
__forwardmethods(ScrolledCanvas, TK.Canvas, '_canvas')
class _Root(TK.Tk):
    """Root window for Screen, based on Tkinter's Tk."""
    def __init__(self):
        TK.Tk.__init__(self)
    def setupcanvas(self, width, height, cwidth, cheight):
        """Create the hosted ScrolledCanvas and pack it to fill the window."""
        self._canvas = ScrolledCanvas(self, width, height, cwidth, cheight)
        self._canvas.pack(expand=1, fill="both")
    def _getcanvas(self):
        """Return the hosted ScrolledCanvas."""
        return self._canvas
    def set_geometry(self, width, height, startx, starty):
        """Apply a WxH+X+Y geometry string to the window."""
        self.geometry("%dx%d%+d%+d" % (width, height, startx, starty))
    def ondestroy(self, destroy):
        """Install *destroy* as handler for the window-close protocol."""
        self.wm_protocol("WM_DELETE_WINDOW", destroy)
    def win_width(self):
        """Return the width of the screen in pixels."""
        return self.winfo_screenwidth()
    def win_height(self):
        """Return the height of the screen in pixels."""
        return self.winfo_screenheight()
# Module-level alias for the Tkinter canvas class.
Canvas = TK.Canvas
class TurtleScreenBase(object):
    """Provide the basic graphics functionality.
    Interface between Tkinter and turtle.py.
    To port turtle.py to some different graphics toolkit
    a corresponding TurtleScreenBase class has to be implemented.
    """
    @staticmethod
    def _blankimage():
        """return a blank image object
        """
        img = TK.PhotoImage(width=1, height=1)
        img.blank()
        return img
    @staticmethod
    def _image(filename):
        """return an image object containing the
        imagedata from a gif-file named filename.
        """
        return TK.PhotoImage(file=filename)
    def __init__(self, cv):
        # cv may be a ScrolledCanvas (carries its own canvas size) or an
        # ordinary TK.Canvas, which gets a scrollregion centered on (0, 0).
        self.cv = cv
        if isinstance(cv, ScrolledCanvas):
            w = self.cv.canvwidth
            h = self.cv.canvheight
        else: # expected: ordinary TK.Canvas
            w = int(self.cv.cget("width"))
            h = int(self.cv.cget("height"))
            self.cv.config(scrollregion = (-w//2, -h//2, w//2, h//2 ))
        self.canvwidth = w
        self.canvheight = h
        # scaling factors between turtle coordinates and canvas pixels
        self.xscale = self.yscale = 1.0
    def _createpoly(self):
        """Create an invisible polygon item on canvas self.cv
        """
        return self.cv.create_polygon((0, 0, 0, 0, 0, 0), fill="", outline="")
    def _drawpoly(self, polyitem, coordlist, fill=None,
                  outline=None, width=None, top=False):
        """Configure polygonitem polyitem according to provided
        arguments:
        coordlist is sequence of coordinates
        fill is filling color
        outline is outline color
        top is a boolean value, which specifies if polyitem
        will be put on top of the canvas' displaylist so it
        will not be covered by other items.
        """
        cl = []
        # canvas y-axis points down, turtle's points up: negate y
        for x, y in coordlist:
            cl.append(x * self.xscale)
            cl.append(-y * self.yscale)
        self.cv.coords(polyitem, *cl)
        if fill is not None:
            self.cv.itemconfigure(polyitem, fill=fill)
        if outline is not None:
            self.cv.itemconfigure(polyitem, outline=outline)
        if width is not None:
            self.cv.itemconfigure(polyitem, width=width)
        if top:
            self.cv.tag_raise(polyitem)
    def _createline(self):
        """Create an invisible line item on canvas self.cv
        """
        return self.cv.create_line(0, 0, 0, 0, fill="", width=2,
                                   capstyle = TK.ROUND)
    def _drawline(self, lineitem, coordlist=None,
                  fill=None, width=None, top=False):
        """Configure lineitem according to provided arguments:
        coordlist is sequence of coordinates
        fill is drawing color
        width is width of drawn line.
        top is a boolean value, which specifies if polyitem
        will be put on top of the canvas' displaylist so it
        will not be covered by other items.
        """
        if coordlist is not None:
            cl = []
            # canvas y-axis points down, turtle's points up: negate y
            for x, y in coordlist:
                cl.append(x * self.xscale)
                cl.append(-y * self.yscale)
            self.cv.coords(lineitem, *cl)
        if fill is not None:
            self.cv.itemconfigure(lineitem, fill=fill)
        if width is not None:
            self.cv.itemconfigure(lineitem, width=width)
        if top:
            self.cv.tag_raise(lineitem)
    def _delete(self, item):
        """Delete graphics item from canvas.
        If item is "all" delete all graphics items.
        """
        self.cv.delete(item)
    def _update(self):
        """Redraw graphics items on canvas
        """
        self.cv.update()
    def _delay(self, delay):
        """Delay subsequent canvas actions for delay ms."""
        self.cv.after(delay)
    def _iscolorstring(self, color):
        """Check if the string color is a legal Tkinter color string.
        """
        # winfo_rgb raises TclError for unknown color names
        try:
            rgb = self.cv.winfo_rgb(color)
            ok = True
        except TK.TclError:
            ok = False
        return ok
    def _bgcolor(self, color=None):
        """Set canvas' backgroundcolor if color is not None,
        else return backgroundcolor."""
        if color is not None:
            self.cv.config(bg = color)
            self._update()
        else:
            return self.cv.cget("bg")
    def _write(self, pos, txt, align, font, pencolor):
        """Write txt at pos in canvas with specified font
        and color.
        Return text item and x-coord of right bottom corner
        of text's bounding box."""
        x, y = pos
        x = x * self.xscale
        y = y * self.yscale
        # map turtle alignment to Tk anchor positions
        anchor = {"left":"sw", "center":"s", "right":"se" }
        item = self.cv.create_text(x-1, -y, text = txt, anchor = anchor[align],
                                        fill = pencolor, font = font)
        x0, y0, x1, y1 = self.cv.bbox(item)
        self.cv.update()
        return item, x1-1
    def _onclick(self, item, fun, num=1, add=None):
        """Bind fun to mouse-click event on turtle.
        fun must be a function with two arguments, the coordinates
        of the clicked point on the canvas.
        num, the number of the mouse-button defaults to 1
        """
        if fun is None:
            self.cv.tag_unbind(item, "<Button-%s>" % num)
        else:
            def eventfun(event):
                # translate window pixels to turtle coordinates
                x, y = (self.cv.canvasx(event.x)/self.xscale,
                        -self.cv.canvasy(event.y)/self.yscale)
                fun(x, y)
            self.cv.tag_bind(item, "<Button-%s>" % num, eventfun, add)
    def _onrelease(self, item, fun, num=1, add=None):
        """Bind fun to mouse-button-release event on turtle.
        fun must be a function with two arguments, the coordinates
        of the point on the canvas where mouse button is released.
        num, the number of the mouse-button defaults to 1
        If a turtle is clicked, first _onclick-event will be performed,
        then _onscreensclick-event.
        """
        if fun is None:
            self.cv.tag_unbind(item, "<Button%s-ButtonRelease>" % num)
        else:
            def eventfun(event):
                x, y = (self.cv.canvasx(event.x)/self.xscale,
                        -self.cv.canvasy(event.y)/self.yscale)
                fun(x, y)
            self.cv.tag_bind(item, "<Button%s-ButtonRelease>" % num,
                             eventfun, add)
    def _ondrag(self, item, fun, num=1, add=None):
        """Bind fun to mouse-move-event (with pressed mouse button) on turtle.
        fun must be a function with two arguments, the coordinates of the
        actual mouse position on the canvas.
        num, the number of the mouse-button defaults to 1
        Every sequence of mouse-move-events on a turtle is preceded by a
        mouse-click event on that turtle.
        """
        if fun is None:
            self.cv.tag_unbind(item, "<Button%s-Motion>" % num)
        else:
            def eventfun(event):
                # swallow any exception from the handler so a broken
                # drag callback cannot kill the Tk event loop
                try:
                    x, y = (self.cv.canvasx(event.x)/self.xscale,
                            -self.cv.canvasy(event.y)/self.yscale)
                    fun(x, y)
                except Exception:
                    pass
            self.cv.tag_bind(item, "<Button%s-Motion>" % num, eventfun, add)
    def _onscreenclick(self, fun, num=1, add=None):
        """Bind fun to mouse-click event on canvas.
        fun must be a function with two arguments, the coordinates
        of the clicked point on the canvas.
        num, the number of the mouse-button defaults to 1
        If a turtle is clicked, first _onclick-event will be performed,
        then _onscreensclick-event.
        """
        if fun is None:
            self.cv.unbind("<Button-%s>" % num)
        else:
            def eventfun(event):
                x, y = (self.cv.canvasx(event.x)/self.xscale,
                        -self.cv.canvasy(event.y)/self.yscale)
                fun(x, y)
            self.cv.bind("<Button-%s>" % num, eventfun, add)
    def _onkeyrelease(self, fun, key):
        """Bind fun to key-release event of key.
        Canvas must have focus. See method listen
        """
        if fun is None:
            self.cv.unbind("<KeyRelease-%s>" % key, None)
        else:
            def eventfun(event):
                fun()
            self.cv.bind("<KeyRelease-%s>" % key, eventfun)
    def _onkeypress(self, fun, key=None):
        """If key is given, bind fun to key-press event of key.
        Otherwise bind fun to any key-press.
        Canvas must have focus. See method listen.
        """
        if fun is None:
            if key is None:
                self.cv.unbind("<KeyPress>", None)
            else:
                self.cv.unbind("<KeyPress-%s>" % key, None)
        else:
            def eventfun(event):
                fun()
            if key is None:
                self.cv.bind("<KeyPress>", eventfun)
            else:
                self.cv.bind("<KeyPress-%s>" % key, eventfun)
    def _listen(self):
        """Set focus on canvas (in order to collect key-events)
        """
        self.cv.focus_force()
    def _ontimer(self, fun, t):
        """Install a timer, which calls fun after t milliseconds.
        """
        if t == 0:
            self.cv.after_idle(fun)
        else:
            self.cv.after(t, fun)
    def _createimage(self, image):
        """Create and return image item on canvas.
        """
        return self.cv.create_image(0, 0, image=image)
    def _drawimage(self, item, pos, image):
        """Configure image item as to draw image object
        at position (x,y) on canvas)
        """
        x, y = pos
        self.cv.coords(item, (x * self.xscale, -y * self.yscale))
        self.cv.itemconfig(item, image=image)
    def _setbgpic(self, item, image):
        """Configure image item as to draw image object
        at center of canvas. Set item to the first item
        in the displaylist, so it will be drawn below
        any other item ."""
        self.cv.itemconfig(item, image=image)
        self.cv.tag_lower(item)
    def _type(self, item):
        """Return 'line' or 'polygon' or 'image' depending on
        type of item.
        """
        return self.cv.type(item)
    def _pointlist(self, item):
        """returns list of coordinate-pairs of points of item
        Example (for insiders):
        >>> from turtle import *
        >>> getscreen()._pointlist(getturtle().turtle._item)
        [(0.0, 9.9999999999999982), (0.0, -9.9999999999999982),
        (9.9999999999999982, 0.0)]
        >>> """
        # canvas returns a flat list [x0, y0, x1, y1, ...]; pair it up
        # and flip y back into turtle orientation
        cl = self.cv.coords(item)
        pl = [(cl[i], -cl[i+1]) for i in range(0, len(cl), 2)]
        return  pl
    def _setscrollregion(self, srx1, sry1, srx2, sry2):
        # set the scrollable area of the canvas in canvas coordinates
        self.cv.config(scrollregion=(srx1, sry1, srx2, sry2))
    def _rescale(self, xscalefactor, yscalefactor):
        # scale the coordinates of every item on the canvas in place
        # (used when world coordinates change)
        items = self.cv.find_all()
        for item in items:
            coordinates = list(self.cv.coords(item))
            newcoordlist = []
            while coordinates:
                x, y = coordinates[:2]
                newcoordlist.append(x * xscalefactor)
                newcoordlist.append(y * yscalefactor)
                coordinates = coordinates[2:]
            self.cv.coords(item, *newcoordlist)
    def _resize(self, canvwidth=None, canvheight=None, bg=None):
        """Resize the canvas the turtles are drawing on. Does
        not alter the drawing window.
        """
        # needs amendment
        if not isinstance(self.cv, ScrolledCanvas):
            return self.canvwidth, self.canvheight
        if canvwidth is canvheight is bg is None:
            return self.cv.canvwidth, self.cv.canvheight
        if canvwidth is not None:
            self.canvwidth = canvwidth
        if canvheight is not None:
            self.canvheight = canvheight
        self.cv.reset(canvwidth, canvheight, bg)
    def _window_size(self):
        """ Return the width and height of the turtle window.
        """
        width = self.cv.winfo_width()
        if width <= 1:  # the window isn't managed by a geometry manager
            width = self.cv['width']
        height = self.cv.winfo_height()
        if height <= 1: # the window isn't managed by a geometry manager
            height = self.cv['height']
        return width, height
    def mainloop(self):
        """Starts event loop - calling Tkinter's mainloop function.
        No argument.
        Must be last statement in a turtle graphics program.
        Must NOT be used if a script is run from within IDLE in -n mode
        (No subprocess) - for interactive use of turtle graphics.
        Example (for a TurtleScreen instance named screen):
        >>> screen.mainloop()
        """
        TK.mainloop()
    def textinput(self, title, prompt):
        """Pop up a dialog window for input of a string.
        Arguments: title is the title of the dialog window,
        prompt is a text mostly describing what information to input.
        Return the string input
        If the dialog is canceled, return None.
        Example (for a TurtleScreen instance named screen):
        >>> screen.textinput("NIM", "Name of first player:")
        """
        return simpledialog.askstring(title, prompt)
    def numinput(self, title, prompt, default=None, minval=None, maxval=None):
        """Pop up a dialog window for input of a number.
        Arguments: title is the title of the dialog window,
        prompt is a text mostly describing what numerical information to input.
        default: default value
        minval: minimum value for input
        maxval: maximum value for input
        The number input must be in the range minval .. maxval if these are
        given. If not, a hint is issued and the dialog remains open for
        correction. Return the number input.
        If the dialog is canceled, return None.
        Example (for a TurtleScreen instance named screen):
        >>> screen.numinput("Poker", "Your stakes:", 1000, minval=10, maxval=10000)
        """
        return simpledialog.askfloat(title, prompt, initialvalue=default,
                                     minvalue=minval, maxvalue=maxval)
class Terminator(Exception):
    """Raised in TurtleScreen.update, if _RUNNING becomes False.

    This stops execution of a turtle graphics script.
    Main purpose: use in the Demo-Viewer turtle.Demo.py.
    """
class TurtleGraphicsError(Exception):
    """Exception raised for errors in the turtle graphics module."""
class Shape(object):
    """Data structure modeling turtle shapes.

    Attribute _type is one of "polygon", "image", "compound".
    Attribute _data is - depending on _type - a polygon-tuple,
    an image, or a list built via the addcomponent method.
    """
    def __init__(self, type_, data=None):
        self._type = type_
        if type_ == "compound":
            # components are collected later via addcomponent()
            data = []
        elif type_ == "polygon":
            # normalize polygons to tuples
            if isinstance(data, list):
                data = tuple(data)
        elif type_ == "image":
            is_gif_path = (isinstance(data, str)
                           and data.lower().endswith(".gif")
                           and isfile(data))
            if is_gif_path:
                data = TurtleScreen._image(data)
            # otherwise data is assumed to be a PhotoImage already
        else:
            raise TurtleGraphicsError("There is no shape type %s" % type_)
        self._data = data
    def addcomponent(self, poly, fill, outline=None):
        """Add component to a shape of type compound.

        Arguments: poly is a polygon, i. e. a tuple of number pairs.
        fill is the fillcolor of the component,
        outline is the outline color of the component (defaults to fill).

        Example:
        >>> poly = ((0,0),(10,-5),(0,10),(-10,-5))
        >>> s = Shape("compound")
        >>> s.addcomponent(poly, "red", "blue")
        >>> # .. add more components and then use register_shape()
        """
        if self._type != "compound":
            raise TurtleGraphicsError("Cannot add component to %s Shape"
                                      % self._type)
        if outline is None:
            outline = fill
        self._data.append([poly, fill, outline])
class Tbuffer(object):
    """Ring buffer used as undobuffer for RawTurtle objects.

    Each slot holds one undo entry; the placeholder for an empty slot
    is the one-element list [None].
    """
    def __init__(self, bufsize=10):
        self.bufsize = bufsize
        self.buffer = [[None]] * bufsize
        self.ptr = -1
        # while True, pushed items are appended to the current slot
        # instead of occupying a new one
        self.cumulate = False
    def reset(self, bufsize=None):
        """Empty the buffer; with a new bufsize also resize and reset ptr."""
        if bufsize is None:
            for slot in range(self.bufsize):
                self.buffer[slot] = [None]
        else:
            self.bufsize = bufsize
            self.buffer = [[None]] * bufsize
            self.ptr = -1
    def push(self, item):
        """Store item in the next slot (or append to the current one)."""
        if self.bufsize <= 0:
            return
        if self.cumulate:
            self.buffer[self.ptr].append(item)
        else:
            self.ptr = (self.ptr + 1) % self.bufsize
            self.buffer[self.ptr] = item
    def pop(self):
        """Return and clear the current slot; None if buffering is off."""
        if self.bufsize <= 0:
            return None
        entry = self.buffer[self.ptr]
        if entry is None:
            return None
        self.buffer[self.ptr] = [None]
        self.ptr = (self.ptr - 1) % self.bufsize
        return entry
    def nr_of_items(self):
        """Number of occupied (non-placeholder) slots."""
        return self.bufsize - self.buffer.count([None])
    def __repr__(self):
        return str(self.buffer) + " " + str(self.ptr)
class TurtleScreen(TurtleScreenBase):
    """Provides screen oriented methods like setbg etc.
    Only relies upon the methods of TurtleScreenBase and NOT
    upon components of the underlying graphics toolkit -
    which is Tkinter in this case.
    """
    # Class-level running flag; checked elsewhere to stop a running script.
    _RUNNING = True
    def __init__(self, cv, mode=_CFG["mode"],
                 colormode=_CFG["colormode"], delay=_CFG["delay"]):
        """Set up the screen on canvas cv with the built-in shapes.

        NOTE(review): the colormode parameter is accepted but ignored;
        self._colormode is always initialized from _CFG["colormode"].
        """
        # built-in turtle shapes, keyed by the names shape() accepts
        self._shapes = {
                   "arrow" : Shape("polygon", ((-10,0), (10,0), (0,10))),
                  "turtle" : Shape("polygon", ((0,16), (-2,14), (-1,10), (-4,7),
                              (-7,9), (-9,8), (-6,5), (-7,1), (-5,-3), (-8,-6),
                              (-6,-8), (-4,-5), (0,-7), (4,-5), (6,-8), (8,-6),
                              (5,-3), (7,1), (6,5), (9,8), (7,9), (4,7), (1,10),
                              (2,14))),
                  "circle" : Shape("polygon", ((10,0), (9.51,3.09), (8.09,5.88),
                              (5.88,8.09), (3.09,9.51), (0,10), (-3.09,9.51),
                              (-5.88,8.09), (-8.09,5.88), (-9.51,3.09), (-10,0),
                              (-9.51,-3.09), (-8.09,-5.88), (-5.88,-8.09),
                              (-3.09,-9.51), (-0.00,-10.00), (3.09,-9.51),
                              (5.88,-8.09), (8.09,-5.88), (9.51,-3.09))),
                  "square" : Shape("polygon", ((10,-10), (10,10), (-10,10),
                              (-10,-10))),
                "triangle" : Shape("polygon", ((10,-5.77), (0,11.55),
                              (-10,-5.77))),
                  "classic": Shape("polygon", ((0,0),(-5,-9),(0,-7),(5,-9))),
                   "blank" : Shape("image", self._blankimage())
                  }
        self._bgpics = {"nopic" : ""}
        TurtleScreenBase.__init__(self, cv)
        self._mode = mode
        self._delayvalue = delay
        self._colormode = _CFG["colormode"]
        self._keys = []
        self.clear()
        if sys.platform == 'darwin':
            # Force Turtle window to the front on OS X. This is needed because
            # the Turtle window will show behind the Terminal window when you
            # start the demo from the command line.
            rootwindow = cv.winfo_toplevel()
            rootwindow.call('wm', 'attributes', '.', '-topmost', '1')
            rootwindow.call('wm', 'attributes', '.', '-topmost', '0')
    def clear(self):
        """Delete all drawings and all turtles from the TurtleScreen.
        No argument.
        Reset empty TurtleScreen to its initial state: white background,
        no backgroundimage, no eventbindings and tracing on.
        Example (for a TurtleScreen instance named screen):
        >>> screen.clear()
        Note: this method is not available as function.
        """
        self._delayvalue = _CFG["delay"]
        self._colormode = _CFG["colormode"]
        self._delete("all")
        self._bgpic = self._createimage("")
        self._bgpicname = "nopic"
        self._tracing = 1
        self._updatecounter = 0
        self._turtles = []
        self.bgcolor("white")
        # drop all mouse and key bindings
        for btn in 1, 2, 3:
            self.onclick(None, btn)
        self.onkeypress(None)
        for key in self._keys[:]:
            self.onkey(None, key)
            self.onkeypress(None, key)
        # also reset the class-level default pen
        Turtle._pen = None
def mode(self, mode=None):
"""Set turtle-mode ('standard', 'logo' or 'world') and perform reset.
Optional argument:
mode -- one of the strings 'standard', 'logo' or 'world'
Mode 'standard' is compatible with turtle.py.
Mode 'logo' is compatible with most Logo-Turtle-Graphics.
Mode 'world' uses userdefined 'worldcoordinates'. *Attention*: in
this mode angles appear distorted if x/y unit-ratio doesn't equal 1.
If mode is not given, return the current mode.
Mode Initial turtle heading positive angles
------------|-------------------------|-------------------
'standard' to the right (east) counterclockwise
'logo' upward (north) clockwise
Examples:
>>> mode('logo') # resets turtle heading to north
>>> mode()
'logo'
"""
if mode is None:
return self._mode
mode = mode.lower()
if mode not in ["standard", "logo", "world"]:
raise TurtleGraphicsError("No turtle-graphics-mode %s" % mode)
self._mode = mode
if mode in ["standard", "logo"]:
self._setscrollregion(-self.canvwidth//2, -self.canvheight//2,
self.canvwidth//2, self.canvheight//2)
self.xscale = self.yscale = 1.0
self.reset()
    def setworldcoordinates(self, llx, lly, urx, ury):
        """Set up a user defined coordinate-system.
        Arguments:
        llx -- a number, x-coordinate of lower left corner of canvas
        lly -- a number, y-coordinate of lower left corner of canvas
        urx -- a number, x-coordinate of upper right corner of canvas
        ury -- a number, y-coordinate of upper right corner of canvas
        Set up user coodinat-system and switch to mode 'world' if necessary.
        This performs a screen.reset. If mode 'world' is already active,
        all drawings are redrawn according to the new coordinates.
        But ATTENTION: in user-defined coordinatesystems angles may appear
        distorted. (see Screen.mode())
        Example (for a TurtleScreen instance named screen):
        >>> screen.setworldcoordinates(-10,-0.5,50,1.5)
        >>> for _ in range(36):
        ...     left(10)
        ...     forward(0.5)
        """
        if self.mode() != "world":
            self.mode("world")
        xspan = float(urx - llx)
        yspan = float(ury - lly)
        wx, wy = self._window_size()
        # fit the canvas into the current window, leaving a 20-pixel margin
        self.screensize(wx-20, wy-20)
        oldxscale, oldyscale = self.xscale, self.yscale
        # pixels per world-coordinate unit along each axis
        self.xscale = self.canvwidth / xspan
        self.yscale = self.canvheight / yspan
        srx1 = llx * self.xscale
        sry1 = -ury * self.yscale
        srx2 = self.canvwidth + srx1
        sry2 = self.canvheight + sry1
        self._setscrollregion(srx1, sry1, srx2, sry2)
        # redraw existing items under the new scale
        self._rescale(self.xscale/oldxscale, self.yscale/oldyscale)
        self.update()
def register_shape(self, name, shape=None):
    """Add a turtle shape to TurtleScreen's shapelist.

    Arguments:
    (1) name is the name of a gif-file and shape is None.
        Installs the corresponding image shape.
        !! Image-shapes DO NOT rotate when turning the turtle,
        !! so they do not display the heading of the turtle!
    (2) name is an arbitrary string and shape is a tuple
        of pairs of coordinates. Installs the corresponding
        polygon shape
    (3) name is an arbitrary string and shape is a
        (compound) Shape object. Installs the corresponding
        compound shape.

    To use a shape, you have to issue the command shape(shapename).

    call: register_shape("turtle.gif")
    --or: register_shape("tri", ((0,0), (10,10), (-10,10)))

    Example (for a TurtleScreen instance named screen):
    >>> screen.register_shape("triangle", ((5,-3),(0,5),(-5,-3)))
    """
    if shape is None:
        # Without an explicit shape only gif files can be registered.
        if not name.lower().endswith(".gif"):
            raise TurtleGraphicsError(
                "Bad arguments for register_shape.\nUse help(register_shape)")
        shape = Shape("image", self._image(name))
    elif isinstance(shape, tuple):
        shape = Shape("polygon", shape)
    # anything else is taken to be a ready-made (compound) Shape instance
    self._shapes[name] = shape
def _colorstr(self, color):
    """Return color string corresponding to args.

    Argument may be a string or a tuple of three
    numbers corresponding to actual colormode,
    i.e. in the range 0<=n<=colormode.

    If the argument doesn't represent a color,
    an error is raised.
    """
    if len(color) == 1:
        color = color[0]                     # unwrap a single argument
    if isinstance(color, str):
        if self._iscolorstring(color) or color == "":
            return color
        raise TurtleGraphicsError("bad color string: %s" % str(color))
    try:
        r, g, b = color
    except (TypeError, ValueError):
        raise TurtleGraphicsError("bad color arguments: %s" % str(color))
    if self._colormode == 1.0:
        # scale 0.0..1.0 channel values up to integers 0..255
        r, g, b = (round(255.0 * channel) for channel in (r, g, b))
    in_range = (0 <= r <= 255) and (0 <= g <= 255) and (0 <= b <= 255)
    if not in_range:
        raise TurtleGraphicsError("bad color sequence: %s" % str(color))
    return "#%02x%02x%02x" % (r, g, b)
def _color(self, cstr):
    """Convert a Tk color string to an (r, g, b) tuple scaled to the
    current colormode; non-"#" color names are returned unchanged.

    Accepts "#rrggbb" and the short "#rgb" form; any other "#..."
    string raises TurtleGraphicsError.
    """
    if not cstr.startswith("#"):
        return cstr
    if len(cstr) == 7:
        cl = [int(cstr[i:i+2], 16) for i in (1, 3, 5)]
    elif len(cstr) == 4:
        # BUGFIX: the old code did `cstr[h] for h in cstr[1:]`, indexing
        # the string with a character and raising TypeError for every
        # "#rgb" input.  Convert each hex digit directly instead.
        # NOTE(review): the 16x scaling (0xf -> 240) matches historical
        # turtle behavior; Tk itself treats "#fff" as full white (17x).
        cl = [16*int(ch, 16) for ch in cstr[1:]]
    else:
        raise TurtleGraphicsError("bad colorstring: %s" % cstr)
    return tuple(c * self._colormode/255 for c in cl)
def colormode(self, cmode=None):
    """Return the colormode or set it to 1.0 or 255.

    Optional argument:
    cmode -- one of the values 1.0 or 255

    r, g, b values of colortriples have to be in range 0..cmode.

    Any other value of cmode is silently ignored.

    Example (for a TurtleScreen instance named screen):
    >>> screen.colormode()
    1.0
    >>> screen.colormode(255)
    >>> pencolor(240,160,80)
    """
    if cmode is None:
        return self._colormode
    if cmode == 1.0:
        self._colormode = 1.0
    elif cmode == 255:
        self._colormode = 255
def reset(self):
    """Reset all Turtles on the Screen to their initial state.

    No argument.

    Example (for a TurtleScreen instance named screen):
    >>> screen.reset()
    """
    for turtle in self._turtles:
        # make each turtle agree with the screen's current mode before
        # resetting it to its start position and heading
        turtle._setmode(self._mode)
        turtle.reset()
def turtles(self):
    """Return the list of turtles on the screen.

    Note: this is the screen's internal list itself, not a copy.

    Example (for a TurtleScreen instance named screen):
    >>> screen.turtles()
    [<turtle.Turtle object at 0x00E11FB0>]
    """
    return self._turtles
def bgcolor(self, *args):
    """Set or return backgroundcolor of the TurtleScreen.

    Arguments (if given): a color string or three numbers
    in the range 0..colormode or a 3-tuple of such numbers.

    Example (for a TurtleScreen instance named screen):
    >>> screen.bgcolor("orange")
    >>> screen.bgcolor()
    'orange'
    >>> screen.bgcolor(0.5,0,0.5)
    >>> screen.bgcolor()
    '#800080'
    """
    # With arguments: set the new color; without: query (None = query).
    newcolor = self._colorstr(args) if args else None
    result = self._bgcolor(newcolor)
    if result is not None:
        result = self._color(result)
    return result
def tracer(self, n=None, delay=None):
    """Turns turtle animation on/off and set delay for update drawings.

    Optional arguments:
    n -- nonnegative  integer
    delay -- nonnegative  integer

    If n is given, only each n-th regular screen update is really performed.
    (Can be used to accelerate the drawing of complex graphics.)
    Second arguments sets delay value (see RawTurtle.delay())

    Example (for a TurtleScreen instance named screen):
    >>> screen.tracer(8, 25)
    >>> dist = 2
    >>> for i in range(200):
    ...     fd(dist)
    ...     rt(90)
    ...     dist += 2
    """
    if n is None:
        return self._tracing
    self._tracing = int(n)
    self._updatecounter = 0          # restart the skip counter
    if delay is not None:
        self._delayvalue = int(delay)
    if self._tracing:
        self.update()                # show the current state right away
def delay(self, delay=None):
    """ Return or set the drawing delay in milliseconds.

    Optional argument:
    delay -- positive integer

    Example (for a TurtleScreen instance named screen):
    >>> screen.delay(15)
    >>> screen.delay()
    15
    """
    if delay is None:
        return self._delayvalue
    self._delayvalue = int(delay)
def _incrementudc(self):
    """Increment update counter."""
    if not TurtleScreen._RUNNING:
        # The mainloop has terminated (e.g. window closed): re-arm the
        # class flag for a possible restart, then abort the running
        # drawing by raising Terminator.
        TurtleScreen._RUNNING = True
        raise Terminator
    if self._tracing > 0:
        # counter wraps at _tracing; an update is performed when it
        # reaches 0 (i.e. every n-th call, see tracer())
        self._updatecounter += 1
        self._updatecounter %= self._tracing
def update(self):
    """Perform a TurtleScreen update.
    """
    tracing = self._tracing
    # temporarily force tracing on so every turtle really gets redrawn,
    # then restore the previous tracing setting
    self._tracing = True
    for t in self.turtles():
        t._update_data()
        t._drawturtle()
    self._tracing = tracing
    self._update()
def window_width(self):
    """ Return the width of the turtle window.

    Example (for a TurtleScreen instance named screen):
    >>> screen.window_width()
    640
    """
    # _window_size() returns (width, height)
    return self._window_size()[0]
def window_height(self):
    """ Return the height of the turtle window.

    Example (for a TurtleScreen instance named screen):
    >>> screen.window_height()
    480
    """
    # _window_size() returns (width, height)
    return self._window_size()[1]
def getcanvas(self):
    """Return the Canvas of this TurtleScreen.

    No argument.

    Example (for a Screen instance named screen):
    >>> cv = screen.getcanvas()
    >>> cv
    <turtle.ScrolledCanvas instance at 0x010742D8>
    """
    return self.cv
def getshapes(self):
    """Return a list of names of all currently available turtle shapes.

    No argument.

    Example (for a TurtleScreen instance named screen):
    >>> screen.getshapes()
    ['arrow', 'blank', 'circle', ... , 'turtle']
    """
    # iterating a dict yields its keys, so sorting the dict directly is
    # equivalent to sorting .keys()
    return sorted(self._shapes)
def onclick(self, fun, btn=1, add=None):
    """Bind fun to mouse-click event on canvas.

    Arguments:
    fun -- a function with two arguments, the coordinates of the
           clicked point on the canvas.
    btn -- the number of the mouse-button, defaults to 1
    add -- True or False - if True, a new binding is added, otherwise
           it replaces a former binding (passed through to the canvas)

    Example (for a TurtleScreen instance named screen)

    >>> screen.onclick(goto)
    >>> # Subsequently clicking into the TurtleScreen will
    >>> # make the turtle move to the clicked point.
    >>> screen.onclick(None)
    """
    self._onscreenclick(fun, btn, add)
def onkey(self, fun, key):
    """Bind fun to key-release event of key.

    Arguments:
    fun -- a function with no arguments
    key -- a string: key (e.g. "a") or key-symbol (e.g. "space")

    In order to be able to register key-events, TurtleScreen
    must have focus. (See method listen.)

    Example (for a TurtleScreen instance named screen):

    >>> def f():
    ...     fd(50)
    ...     lt(60)
    ...
    >>> screen.onkey(f, "Up")
    >>> screen.listen()

    Subsequently the turtle can be moved by repeatedly pressing
    the up-arrow key, consequently drawing a hexagon
    """
    # keep the bookkeeping list of bound keys in sync with the binding
    keys = self._keys
    if fun is None:
        if key in keys:
            keys.remove(key)
    else:
        if key not in keys:
            keys.append(key)
    self._onkeyrelease(fun, key)
def onkeypress(self, fun, key=None):
    """Bind fun to key-press event of key if key is given,
    or to any key-press-event if no key is given.

    Arguments:
    fun -- a function with no arguments
    key -- a string: key (e.g. "a") or key-symbol (e.g. "space")

    In order to be able to register key-events, TurtleScreen
    must have focus. (See method listen.)

    Example (for a TurtleScreen instance named screen
    and a Turtle instance named turtle):

    >>> def f():
    ...     fd(50)
    ...     lt(60)
    ...
    >>> screen.onkeypress(f, "Up")
    >>> screen.listen()

    Subsequently the turtle can be moved by repeatedly pressing
    the up-arrow key, or by keeping pressed the up-arrow key.
    consequently drawing a hexagon.
    """
    # keep the bookkeeping list of bound keys in sync with the binding;
    # key is None when fun is bound to *any* key-press event
    keys = self._keys
    if fun is None:
        if key in keys:
            keys.remove(key)
    else:
        if key is not None and key not in keys:
            keys.append(key)
    self._onkeypress(fun, key)
def listen(self, xdummy=None, ydummy=None):
    """Set focus on TurtleScreen (in order to collect key-events)

    No arguments.
    Dummy arguments are provided in order
    to be able to pass listen to the onclick method.

    Example (for a TurtleScreen instance named screen):
    >>> screen.listen()
    """
    self._listen()
def ontimer(self, fun, t=0):
    """Install a timer, which calls fun after t milliseconds.

    Arguments:
    fun -- a function with no arguments.
    t -- a number >= 0

    Example (for a TurtleScreen instance named screen):

    >>> running = True
    >>> def f():
    ...     if running:
    ...             fd(50)
    ...             lt(60)
    ...             screen.ontimer(f, 250)
    ...
    >>> f()   # makes the turtle marching around
    >>> running = False
    """
    self._ontimer(fun, t)
def bgpic(self, picname=None):
    """Set background image or return name of current backgroundimage.

    Optional argument:
    picname -- a string, name of a gif-file or "nopic".

    If picname is a filename, set the corresponding image as background.
    If picname is "nopic", delete backgroundimage, if present.
    If picname is None, return the filename of the current backgroundimage.

    Example (for a TurtleScreen instance named screen):
    >>> screen.bgpic()
    'nopic'
    >>> screen.bgpic("landscape.gif")
    >>> screen.bgpic()
    'landscape.gif'
    """
    if picname is None:
        return self._bgpicname
    # load each image only once; afterwards serve it from the cache
    try:
        image = self._bgpics[picname]
    except KeyError:
        image = self._bgpics[picname] = self._image(picname)
    self._setbgpic(self._bgpic, image)
    self._bgpicname = picname
def screensize(self, canvwidth=None, canvheight=None, bg=None):
    """Resize the canvas the turtles are drawing on.

    Optional arguments:
    canvwidth -- positive integer, new width of canvas in pixels
    canvheight --  positive integer, new height of canvas in pixels
    bg -- colorstring or color-tuple, new backgroundcolor

    If no arguments are given, return current (canvaswidth, canvasheight)

    Do not alter the drawing window. To observe hidden parts of
    the canvas use the scrollbars. (Can make visible those parts
    of a drawing, which were outside the canvas before!)

    Example (for a Turtle instance named turtle):
    >>> turtle.screensize(2000,1500)
    >>> # e.g. to search for an erroneously escaped turtle ;-)
    """
    # delegates to the screen-base implementation of _resize
    return self._resize(canvwidth, canvheight, bg)
# alternate names for screen methods
onscreenclick = onclick
resetscreen = reset
clearscreen = clear
addshape = register_shape
onkeyrelease = onkey
class TNavigator(object):
    """Navigation part of the RawTurtle.

    Implements methods for turtle movement.
    """
    # start orientation vector of a turtle for each of the three modes
    START_ORIENTATION = {
        "standard": Vec2D(1.0, 0.0),
        "world"   : Vec2D(1.0, 0.0),
        "logo"    : Vec2D(0.0, 1.0) }
    DEFAULT_MODE = "standard"
    DEFAULT_ANGLEOFFSET = 0
    DEFAULT_ANGLEORIENT = 1

    def __init__(self, mode=DEFAULT_MODE):
        self._angleOffset = self.DEFAULT_ANGLEOFFSET
        self._angleOrient = self.DEFAULT_ANGLEORIENT
        self._mode = mode
        self.undobuffer = None
        self.degrees()
        # _setmode configures _angleOffset/_angleOrient for the given mode
        self._mode = None
        self._setmode(mode)
        TNavigator.reset(self)

    def reset(self):
        """reset turtle to its initial values

        Will be overwritten by parent class
        """
        self._position = Vec2D(0.0, 0.0)
        self._orient = TNavigator.START_ORIENTATION[self._mode]

    def _setmode(self, mode=None):
        """Set turtle-mode to 'standard', 'world' or 'logo'.
        """
        if mode is None:
            return self._mode
        if mode not in ["standard", "logo", "world"]:
            return  # unknown mode: silently ignore
        self._mode = mode
        if mode in ["standard", "world"]:
            # heading 0 points east, angles grow counterclockwise
            self._angleOffset = 0
            self._angleOrient = 1
        else: # mode == "logo":
            # heading 0 points north, angles grow clockwise
            self._angleOffset = self._fullcircle/4.
            self._angleOrient = -1

    def _setDegreesPerAU(self, fullcircle):
        """Helper function for degrees() and radians()"""
        self._fullcircle = fullcircle
        # conversion factor from user angle units to degrees
        self._degreesPerAU = 360/fullcircle
        if self._mode == "standard":
            self._angleOffset = 0
        else:
            self._angleOffset = fullcircle/4.

    def degrees(self, fullcircle=360.0):
        """ Set angle measurement units to degrees.

        Optional argument:
        fullcircle -  a number

        Set angle measurement units, i. e. set number
        of 'degrees' for a full circle. Default value is
        360 degrees.

        Example (for a Turtle instance named turtle):
        >>> turtle.left(90)
        >>> turtle.heading()
        90

        Change angle measurement unit to grad (also known as gon,
        grade, or gradian and equals 1/100-th of the right angle.)
        >>> turtle.degrees(400.0)
        >>> turtle.heading()
        100
        """
        self._setDegreesPerAU(fullcircle)

    def radians(self):
        """ Set the angle measurement units to radians.

        No arguments.

        Example (for a Turtle instance named turtle):
        >>> turtle.heading()
        90
        >>> turtle.radians()
        >>> turtle.heading()
        1.5707963267948966
        """
        self._setDegreesPerAU(2*math.pi)

    def _go(self, distance):
        """move turtle forward by specified distance"""
        ende = self._position + self._orient * distance
        self._goto(ende)

    def _rotate(self, angle):
        """Turn turtle counterclockwise by specified angle if angle > 0."""
        angle *= self._degreesPerAU
        self._orient = self._orient.rotate(angle)

    def _goto(self, end):
        """move turtle to position end."""
        self._position = end

    def forward(self, distance):
        """Move the turtle forward by the specified distance.

        Aliases: forward | fd

        Argument:
        distance -- a number (integer or float)

        Move the turtle forward by the specified distance, in the direction
        the turtle is headed.

        Example (for a Turtle instance named turtle):
        >>> turtle.position()
        (0.00, 0.00)
        >>> turtle.forward(25)
        >>> turtle.position()
        (25.00,0.00)
        >>> turtle.forward(-75)
        >>> turtle.position()
        (-50.00,0.00)
        """
        self._go(distance)

    def back(self, distance):
        """Move the turtle backward by distance.

        Aliases: back | backward | bk

        Argument:
        distance -- a number

        Move the turtle backward by distance, opposite to the direction the
        turtle is headed. Do not change the turtle's heading.

        Example (for a Turtle instance named turtle):
        >>> turtle.position()
        (0.00, 0.00)
        >>> turtle.backward(30)
        >>> turtle.position()
        (-30.00, 0.00)
        """
        self._go(-distance)

    def right(self, angle):
        """Turn turtle right by angle units.

        Aliases: right | rt

        Argument:
        angle -- a number (integer or float)

        Turn turtle right by angle units. (Units are by default degrees,
        but can be set via the degrees() and radians() functions.)
        Angle orientation depends on mode. (See this.)

        Example (for a Turtle instance named turtle):
        >>> turtle.heading()
        22.0
        >>> turtle.right(45)
        >>> turtle.heading()
        337.0
        """
        self._rotate(-angle)

    def left(self, angle):
        """Turn turtle left by angle units.

        Aliases: left | lt

        Argument:
        angle -- a number (integer or float)

        Turn turtle left by angle units. (Units are by default degrees,
        but can be set via the degrees() and radians() functions.)
        Angle orientation depends on mode. (See this.)

        Example (for a Turtle instance named turtle):
        >>> turtle.heading()
        22.0
        >>> turtle.left(45)
        >>> turtle.heading()
        67.0
        """
        self._rotate(angle)

    def pos(self):
        """Return the turtle's current location (x,y), as a Vec2D-vector.

        Aliases: pos | position

        No arguments.

        Example (for a Turtle instance named turtle):
        >>> turtle.pos()
        (0.00, 240.00)
        """
        return self._position

    def xcor(self):
        """ Return the turtle's x coordinate.

        No arguments.

        Example (for a Turtle instance named turtle):
        >>> reset()
        >>> turtle.left(60)
        >>> turtle.forward(100)
        >>> print(turtle.xcor())
        50.0
        """
        return self._position[0]

    def ycor(self):
        """ Return the turtle's y coordinate

        No arguments.

        Example (for a Turtle instance named turtle):
        >>> reset()
        >>> turtle.left(60)
        >>> turtle.forward(100)
        >>> print(turtle.ycor())
        86.6025403784
        """
        return self._position[1]

    def goto(self, x, y=None):
        """Move turtle to an absolute position.

        Aliases: setpos | setposition | goto:

        Arguments:
        x -- a number      or     a pair/vector of numbers
        y -- a number             or None

        call: goto(x, y)         # two coordinates
        --or: goto((x, y))       # a pair (tuple) of coordinates
        --or: goto(vec)          # e.g. as returned by pos()

        Move turtle to an absolute position. If the pen is down,
        a line will be drawn. The turtle's orientation does not change.

        Example (for a Turtle instance named turtle):
        >>> tp = turtle.pos()
        >>> tp
        (0.00, 0.00)
        >>> turtle.setpos(60,30)
        >>> turtle.pos()
        (60.00,30.00)
        >>> turtle.setpos((20,80))
        >>> turtle.pos()
        (20.00,80.00)
        >>> turtle.setpos(tp)
        >>> turtle.pos()
        (0.00,0.00)
        """
        if y is None:
            # x is a pair/vector of coordinates
            self._goto(Vec2D(*x))
        else:
            self._goto(Vec2D(x, y))

    def home(self):
        """Move turtle to the origin - coordinates (0,0).

        No arguments.

        Move turtle to the origin - coordinates (0,0) and set its
        heading to its start-orientation (which depends on mode).

        Example (for a Turtle instance named turtle):
        >>> turtle.home()
        """
        self.goto(0, 0)
        self.setheading(0)

    def setx(self, x):
        """Set the turtle's first coordinate to x

        Argument:
        x -- a number (integer or float)

        Set the turtle's first coordinate to x, leave second coordinate
        unchanged.

        Example (for a Turtle instance named turtle):
        >>> turtle.position()
        (0.00, 240.00)
        >>> turtle.setx(10)
        >>> turtle.position()
        (10.00, 240.00)
        """
        self._goto(Vec2D(x, self._position[1]))

    def sety(self, y):
        """Set the turtle's second coordinate to y

        Argument:
        y -- a number (integer or float)

        Set the turtle's second coordinate to y, first coordinate remains
        unchanged.

        Example (for a Turtle instance named turtle):
        >>> turtle.position()
        (0.00, 40.00)
        >>> turtle.sety(-10)
        >>> turtle.position()
        (0.00, -10.00)
        """
        self._goto(Vec2D(self._position[0], y))

    def distance(self, x, y=None):
        """Return the distance from the turtle to (x,y) in turtle step units.

        Arguments:
        x -- a number   or  a pair/vector of numbers   or   a turtle instance
        y -- a number       or None                         or None

        call: distance(x, y)         # two coordinates
        --or: distance((x, y))       # a pair (tuple) of coordinates
        --or: distance(vec)          # e.g. as returned by pos()
        --or: distance(mypen)        # where mypen is another turtle

        Example (for a Turtle instance named turtle):
        >>> turtle.pos()
        (0.00, 0.00)
        >>> turtle.distance(30,40)
        50.0
        >>> pen = Turtle()
        >>> pen.forward(77)
        >>> turtle.distance(pen)
        77.0
        """
        # normalize the argument variants into a Vec2D position
        if y is not None:
            pos = Vec2D(x, y)
        if isinstance(x, Vec2D):
            pos = x
        elif isinstance(x, tuple):
            pos = Vec2D(*x)
        elif isinstance(x, TNavigator):
            pos = x._position
        return abs(pos - self._position)

    def towards(self, x, y=None):
        """Return the angle of the line from the turtle's position to (x, y).

        Arguments:
        x -- a number   or  a pair/vector of numbers   or   a turtle instance
        y -- a number       or None                         or None

        call: towards(x, y)         # two coordinates
        --or: towards((x, y))       # a pair (tuple) of coordinates
        --or: towards(vec)          # e.g. as returned by pos()
        --or: towards(mypen)        # where mypen is another turtle

        Return the angle, between the line from turtle-position to position
        specified by x, y and the turtle's start orientation. (Depends on
        modes - "standard" or "logo")

        Example (for a Turtle instance named turtle):
        >>> turtle.pos()
        (10.00, 10.00)
        >>> turtle.towards(0,0)
        225.0
        """
        # normalize the argument variants into a Vec2D position
        if y is not None:
            pos = Vec2D(x, y)
        if isinstance(x, Vec2D):
            pos = x
        elif isinstance(x, tuple):
            pos = Vec2D(*x)
        elif isinstance(x, TNavigator):
            pos = x._position
        x, y = pos - self._position
        # absolute angle in degrees, then converted to user angle units
        result = round(math.atan2(y, x)*180.0/math.pi, 10) % 360.0
        result /= self._degreesPerAU
        return (self._angleOffset + self._angleOrient*result) % self._fullcircle

    def heading(self):
        """ Return the turtle's current heading.

        No arguments.

        Example (for a Turtle instance named turtle):
        >>> turtle.left(67)
        >>> turtle.heading()
        67.0
        """
        x, y = self._orient
        # absolute angle in degrees, then converted to user angle units
        result = round(math.atan2(y, x)*180.0/math.pi, 10) % 360.0
        result /= self._degreesPerAU
        return (self._angleOffset + self._angleOrient*result) % self._fullcircle

    def setheading(self, to_angle):
        """Set the orientation of the turtle to to_angle.

        Aliases:  setheading | seth

        Argument:
        to_angle -- a number (integer or float)

        Set the orientation of the turtle to to_angle.
        Here are some common directions in degrees:

         standard - mode:          logo-mode:
        -------------------|--------------------
           0 - east                0 - north
          90 - north              90 - east
         180 - west              180 - south
         270 - south             270 - west

        Example (for a Turtle instance named turtle):
        >>> turtle.setheading(90)
        >>> turtle.heading()
        90
        """
        angle = (to_angle - self.heading())*self._angleOrient
        full = self._fullcircle
        # turn by the smallest equivalent angle, i.e. within (-full/2, full/2]
        angle = (angle+full/2.)%full - full/2.
        self._rotate(angle)

    def circle(self, radius, extent = None, steps = None):
        """ Draw a circle with given radius.

        Arguments:
        radius -- a number
        extent (optional) -- a number
        steps (optional) -- an integer

        Draw a circle with given radius. The center is radius units left
        of the turtle; extent - an angle - determines which part of the
        circle is drawn. If extent is not given, draw the entire circle.
        If extent is not a full circle, one endpoint of the arc is the
        current pen position. Draw the arc in counterclockwise direction
        if radius is positive, otherwise in clockwise direction. Finally
        the direction of the turtle is changed by the amount of extent.

        As the circle is approximated by an inscribed regular polygon,
        steps determines the number of steps to use. If not given,
        it will be calculated automatically. Maybe used to draw regular
        polygons.

        call: circle(radius)                  # full circle
        --or: circle(radius, extent)          # arc
        --or: circle(radius, extent, steps)
        --or: circle(radius, steps=6)         # 6-sided polygon

        Example (for a Turtle instance named turtle):
        >>> turtle.circle(50)
        >>> turtle.circle(120, 180)  # semicircle
        """
        if self.undobuffer:
            # record the whole circle as one undoable sequence
            self.undobuffer.push(["seq"])
            self.undobuffer.cumulate = True
        speed = self.speed()
        if extent is None:
            extent = self._fullcircle
        if steps is None:
            # heuristic: more steps for larger radii and larger arc fractions
            frac = abs(extent)/self._fullcircle
            steps = 1+int(min(11+abs(radius)/6.0, 59.0)*frac)
        w = 1.0 * extent / steps           # turning angle per polygon edge
        w2 = 0.5 * w                       # half-step turn at both arc ends
        l = 2.0 * radius * math.sin(w2*math.pi/180.0*self._degreesPerAU)
        if radius < 0:
            # negative radius: draw clockwise
            l, w, w2 = -l, -w, -w2
        tr = self._tracer()
        dl = self._delay()
        if speed == 0:
            self._tracer(0, 0)
        else:
            self.speed(0)
        self._rotate(w2)
        for i in range(steps):
            self.speed(speed)
            self._go(l)
            self.speed(0)
            self._rotate(w)
        self._rotate(-w2)
        if speed == 0:
            self._tracer(tr, dl)
        self.speed(speed)
        if self.undobuffer:
            self.undobuffer.cumulate = False

## three dummy methods to be implemented by child class:

    def speed(self, s=0):
        """dummy method - to be overwritten by child class"""
    def _tracer(self, a=None, b=None):
        """dummy method - to be overwritten by child class"""
    def _delay(self, n=None):
        """dummy method - to be overwritten by child class"""

    # alternate method names
    fd = forward
    bk = back
    backward = back
    rt = right
    lt = left
    position = pos
    setpos = goto
    setposition = goto
    seth = setheading
class TPen(object):
"""Drawing part of the RawTurtle.
Implements drawing properties.
"""
def __init__(self, resizemode=_CFG["resizemode"]):
self._resizemode = resizemode # or "user" or "noresize"
self.undobuffer = None
TPen._reset(self)
def _reset(self, pencolor=_CFG["pencolor"],
fillcolor=_CFG["fillcolor"]):
self._pensize = 1
self._shown = True
self._pencolor = pencolor
self._fillcolor = fillcolor
self._drawing = True
self._speed = 3
self._stretchfactor = (1., 1.)
self._shearfactor = 0.
self._tilt = 0.
self._shapetrafo = (1., 0., 0., 1.)
self._outlinewidth = 1
def resizemode(self, rmode=None):
"""Set resizemode to one of the values: "auto", "user", "noresize".
(Optional) Argument:
rmode -- one of the strings "auto", "user", "noresize"
Different resizemodes have the following effects:
- "auto" adapts the appearance of the turtle
corresponding to the value of pensize.
- "user" adapts the appearance of the turtle according to the
values of stretchfactor and outlinewidth (outline),
which are set by shapesize()
- "noresize" no adaption of the turtle's appearance takes place.
If no argument is given, return current resizemode.
resizemode("user") is called by a call of shapesize with arguments.
Examples (for a Turtle instance named turtle):
>>> turtle.resizemode("noresize")
>>> turtle.resizemode()
'noresize'
"""
if rmode is None:
return self._resizemode
rmode = rmode.lower()
if rmode in ["auto", "user", "noresize"]:
self.pen(resizemode=rmode)
def pensize(self, width=None):
"""Set or return the line thickness.
Aliases: pensize | width
Argument:
width -- positive number
Set the line thickness to width or return it. If resizemode is set
to "auto" and turtleshape is a polygon, that polygon is drawn with
the same line thickness. If no argument is given, current pensize
is returned.
Example (for a Turtle instance named turtle):
>>> turtle.pensize()
1
>>> turtle.pensize(10) # from here on lines of width 10 are drawn
"""
if width is None:
return self._pensize
self.pen(pensize=width)
def penup(self):
"""Pull the pen up -- no drawing when moving.
Aliases: penup | pu | up
No argument
Example (for a Turtle instance named turtle):
>>> turtle.penup()
"""
if not self._drawing:
return
self.pen(pendown=False)
def pendown(self):
"""Pull the pen down -- drawing when moving.
Aliases: pendown | pd | down
No argument.
Example (for a Turtle instance named turtle):
>>> turtle.pendown()
"""
if self._drawing:
return
self.pen(pendown=True)
def isdown(self):
"""Return True if pen is down, False if it's up.
No argument.
Example (for a Turtle instance named turtle):
>>> turtle.penup()
>>> turtle.isdown()
False
>>> turtle.pendown()
>>> turtle.isdown()
True
"""
return self._drawing
def speed(self, speed=None):
""" Return or set the turtle's speed.
Optional argument:
speed -- an integer in the range 0..10 or a speedstring (see below)
Set the turtle's speed to an integer value in the range 0 .. 10.
If no argument is given: return current speed.
If input is a number greater than 10 or smaller than 0.5,
speed is set to 0.
Speedstrings are mapped to speedvalues in the following way:
'fastest' : 0
'fast' : 10
'normal' : 6
'slow' : 3
'slowest' : 1
speeds from 1 to 10 enforce increasingly faster animation of
line drawing and turtle turning.
Attention:
speed = 0 : *no* animation takes place. forward/back makes turtle jump
and likewise left/right make the turtle turn instantly.
Example (for a Turtle instance named turtle):
>>> turtle.speed(3)
"""
speeds = {'fastest':0, 'fast':10, 'normal':6, 'slow':3, 'slowest':1 }
if speed is None:
return self._speed
if speed in speeds:
speed = speeds[speed]
elif 0.5 < speed < 10.5:
speed = int(round(speed))
else:
speed = 0
self.pen(speed=speed)
def color(self, *args):
"""Return or set the pencolor and fillcolor.
Arguments:
Several input formats are allowed.
They use 0, 1, 2, or 3 arguments as follows:
color()
Return the current pencolor and the current fillcolor
as a pair of color specification strings as are returned
by pencolor and fillcolor.
color(colorstring), color((r,g,b)), color(r,g,b)
inputs as in pencolor, set both, fillcolor and pencolor,
to the given value.
color(colorstring1, colorstring2),
color((r1,g1,b1), (r2,g2,b2))
equivalent to pencolor(colorstring1) and fillcolor(colorstring2)
and analogously, if the other input format is used.
If turtleshape is a polygon, outline and interior of that polygon
is drawn with the newly set colors.
For mor info see: pencolor, fillcolor
Example (for a Turtle instance named turtle):
>>> turtle.color('red', 'green')
>>> turtle.color()
('red', 'green')
>>> colormode(255)
>>> color((40, 80, 120), (160, 200, 240))
>>> color()
('#285078', '#a0c8f0')
"""
if args:
l = len(args)
if l == 1:
pcolor = fcolor = args[0]
elif l == 2:
pcolor, fcolor = args
elif l == 3:
pcolor = fcolor = args
pcolor = self._colorstr(pcolor)
fcolor = self._colorstr(fcolor)
self.pen(pencolor=pcolor, fillcolor=fcolor)
else:
return self._color(self._pencolor), self._color(self._fillcolor)
def pencolor(self, *args):
""" Return or set the pencolor.
Arguments:
Four input formats are allowed:
- pencolor()
Return the current pencolor as color specification string,
possibly in hex-number format (see example).
May be used as input to another color/pencolor/fillcolor call.
- pencolor(colorstring)
s is a Tk color specification string, such as "red" or "yellow"
- pencolor((r, g, b))
*a tuple* of r, g, and b, which represent, an RGB color,
and each of r, g, and b are in the range 0..colormode,
where colormode is either 1.0 or 255
- pencolor(r, g, b)
r, g, and b represent an RGB color, and each of r, g, and b
are in the range 0..colormode
If turtleshape is a polygon, the outline of that polygon is drawn
with the newly set pencolor.
Example (for a Turtle instance named turtle):
>>> turtle.pencolor('brown')
>>> tup = (0.2, 0.8, 0.55)
>>> turtle.pencolor(tup)
>>> turtle.pencolor()
'#33cc8c'
"""
if args:
color = self._colorstr(args)
if color == self._pencolor:
return
self.pen(pencolor=color)
else:
return self._color(self._pencolor)
def fillcolor(self, *args):
""" Return or set the fillcolor.
Arguments:
Four input formats are allowed:
- fillcolor()
Return the current fillcolor as color specification string,
possibly in hex-number format (see example).
May be used as input to another color/pencolor/fillcolor call.
- fillcolor(colorstring)
s is a Tk color specification string, such as "red" or "yellow"
- fillcolor((r, g, b))
*a tuple* of r, g, and b, which represent, an RGB color,
and each of r, g, and b are in the range 0..colormode,
where colormode is either 1.0 or 255
- fillcolor(r, g, b)
r, g, and b represent an RGB color, and each of r, g, and b
are in the range 0..colormode
If turtleshape is a polygon, the interior of that polygon is drawn
with the newly set fillcolor.
Example (for a Turtle instance named turtle):
>>> turtle.fillcolor('violet')
>>> col = turtle.pencolor()
>>> turtle.fillcolor(col)
>>> turtle.fillcolor(0, .5, 0)
"""
if args:
color = self._colorstr(args)
if color == self._fillcolor:
return
self.pen(fillcolor=color)
else:
return self._color(self._fillcolor)
def showturtle(self):
"""Makes the turtle visible.
Aliases: showturtle | st
No argument.
Example (for a Turtle instance named turtle):
>>> turtle.hideturtle()
>>> turtle.showturtle()
"""
self.pen(shown=True)
def hideturtle(self):
"""Makes the turtle invisible.
Aliases: hideturtle | ht
No argument.
It's a good idea to do this while you're in the
middle of a complicated drawing, because hiding
the turtle speeds up the drawing observably.
Example (for a Turtle instance named turtle):
>>> turtle.hideturtle()
"""
self.pen(shown=False)
def isvisible(self):
"""Return True if the Turtle is shown, False if it's hidden.
No argument.
Example (for a Turtle instance named turtle):
>>> turtle.hideturtle()
>>> print turtle.isvisible():
False
"""
return self._shown
def pen(self, pen=None, **pendict):
"""Return or set the pen's attributes.
Arguments:
pen -- a dictionary with some or all of the below listed keys.
**pendict -- one or more keyword-arguments with the below
listed keys as keywords.
Return or set the pen's attributes in a 'pen-dictionary'
with the following key/value pairs:
"shown" : True/False
"pendown" : True/False
"pencolor" : color-string or color-tuple
"fillcolor" : color-string or color-tuple
"pensize" : positive number
"speed" : number in range 0..10
"resizemode" : "auto" or "user" or "noresize"
"stretchfactor": (positive number, positive number)
"shearfactor": number
"outline" : positive number
"tilt" : number
This dictionary can be used as argument for a subsequent
pen()-call to restore the former pen-state. Moreover one
or more of these attributes can be provided as keyword-arguments.
This can be used to set several pen attributes in one statement.
Examples (for a Turtle instance named turtle):
>>> turtle.pen(fillcolor="black", pencolor="red", pensize=10)
>>> turtle.pen()
{'pensize': 10, 'shown': True, 'resizemode': 'auto', 'outline': 1,
'pencolor': 'red', 'pendown': True, 'fillcolor': 'black',
'stretchfactor': (1,1), 'speed': 3, 'shearfactor': 0.0}
>>> penstate=turtle.pen()
>>> turtle.color("yellow","")
>>> turtle.penup()
>>> turtle.pen()
{'pensize': 10, 'shown': True, 'resizemode': 'auto', 'outline': 1,
'pencolor': 'yellow', 'pendown': False, 'fillcolor': '',
'stretchfactor': (1,1), 'speed': 3, 'shearfactor': 0.0}
>>> p.pen(penstate, fillcolor="green")
>>> p.pen()
{'pensize': 10, 'shown': True, 'resizemode': 'auto', 'outline': 1,
'pencolor': 'red', 'pendown': True, 'fillcolor': 'green',
'stretchfactor': (1,1), 'speed': 3, 'shearfactor': 0.0}
"""
_pd = {"shown" : self._shown,
"pendown" : self._drawing,
"pencolor" : self._pencolor,
"fillcolor" : self._fillcolor,
"pensize" : self._pensize,
"speed" : self._speed,
"resizemode" : self._resizemode,
"stretchfactor" : self._stretchfactor,
"shearfactor" : self._shearfactor,
"outline" : self._outlinewidth,
"tilt" : self._tilt
}
if not (pen or pendict):
return _pd
if isinstance(pen, dict):
p = pen
else:
p = {}
p.update(pendict)
_p_buf = {}
for key in p:
_p_buf[key] = _pd[key]
if self.undobuffer:
self.undobuffer.push(("pen", _p_buf))
newLine = False
if "pendown" in p:
if self._drawing != p["pendown"]:
newLine = True
if "pencolor" in p:
if isinstance(p["pencolor"], tuple):
p["pencolor"] = self._colorstr((p["pencolor"],))
if self._pencolor != p["pencolor"]:
newLine = True
if "pensize" in p:
if self._pensize != p["pensize"]:
newLine = True
if newLine:
self._newLine()
if "pendown" in p:
self._drawing = p["pendown"]
if "pencolor" in p:
self._pencolor = p["pencolor"]
if "pensize" in p:
self._pensize = p["pensize"]
if "fillcolor" in p:
if isinstance(p["fillcolor"], tuple):
p["fillcolor"] = self._colorstr((p["fillcolor"],))
self._fillcolor = p["fillcolor"]
if "speed" in p:
self._speed = p["speed"]
if "resizemode" in p:
self._resizemode = p["resizemode"]
if "stretchfactor" in p:
sf = p["stretchfactor"]
if isinstance(sf, (int, float)):
sf = (sf, sf)
self._stretchfactor = sf
if "shearfactor" in p:
self._shearfactor = p["shearfactor"]
if "outline" in p:
self._outlinewidth = p["outline"]
if "shown" in p:
self._shown = p["shown"]
if "tilt" in p:
self._tilt = p["tilt"]
if "stretchfactor" in p or "tilt" in p or "shearfactor" in p:
scx, scy = self._stretchfactor
shf = self._shearfactor
sa, ca = math.sin(self._tilt), math.cos(self._tilt)
self._shapetrafo = ( scx*ca, scy*(shf*ca + sa),
-scx*sa, scy*(ca - shf*sa))
self._update()
def _newLine(self, usePos = True):
    """Dummy method -- to be overwritten by child class (RawTurtle)."""
def _update(self, count=True, forced=False):
    """Dummy method -- to be overwritten by child class (RawTurtle)."""
def _color(self, args):
    """Dummy method -- to be overwritten by child class (RawTurtle)."""
def _colorstr(self, args):
    """Dummy method -- to be overwritten by child class (RawTurtle)."""
# Short-form aliases for frequently used pen methods.
width = pensize
up = penup
pu = penup
pd = pendown
down = pendown
st = showturtle
ht = hideturtle
class _TurtleImage(object):
    """Helper class: Datatype to store Turtle attributes
    """

    def __init__(self, screen, shapeIndex):
        self.screen = screen
        # _type caches the kind of the currently installed shape
        # ("polygon", "image" or "compound"); None means no shape yet.
        self._type = None
        self._setshape(shapeIndex)

    def _setshape(self, shapeIndex):
        """Install the screen shape named shapeIndex, reusing or replacing
        the canvas item(s) of the previously installed shape."""
        screen = self.screen
        self.shapeIndex = shapeIndex
        # If the new shape has the same type as the current one the
        # existing canvas item can be reused unchanged.
        if self._type == "polygon" == screen._shapes[shapeIndex]._type:
            return
        if self._type == "image" == screen._shapes[shapeIndex]._type:
            return
        # Otherwise drop the old canvas item(s) ...
        if self._type in ["image", "polygon"]:
            screen._delete(self._item)
        elif self._type == "compound":
            for item in self._item:
                screen._delete(item)
        # ... and create fresh ones matching the new shape's type.
        self._type = screen._shapes[shapeIndex]._type
        if self._type == "polygon":
            self._item = screen._createpoly()
        elif self._type == "image":
            self._item = screen._createimage(screen._shapes["blank"]._data)
        elif self._type == "compound":
            # One polygon item per component of the compound shape.
            self._item = [screen._createpoly() for item in
                          screen._shapes[shapeIndex]._data]
class RawTurtle(TPen, TNavigator):
    """Animation part of the RawTurtle.
    Puts RawTurtle upon a TurtleScreen and provides tools for
    its animation.
    """
    # TurtleScreen instances created implicitly from a raw canvas are
    # remembered here so several turtles can share one screen.
    screens = []

    def __init__(self, canvas=None,
                 shape=_CFG["shape"],
                 undobuffersize=_CFG["undobuffersize"],
                 visible=_CFG["visible"]):
        # Accept a ready-made screen, a TurtleScreen, or a bare canvas.
        if isinstance(canvas, _Screen):
            self.screen = canvas
        elif isinstance(canvas, TurtleScreen):
            if canvas not in RawTurtle.screens:
                RawTurtle.screens.append(canvas)
            self.screen = canvas
        elif isinstance(canvas, (ScrolledCanvas, Canvas)):
            # Bare canvas: reuse the TurtleScreen already wrapping it,
            # or create and register a new one.
            for screen in RawTurtle.screens:
                if screen.cv == canvas:
                    self.screen = screen
                    break
            else:
                self.screen = TurtleScreen(canvas)
                RawTurtle.screens.append(self.screen)
        else:
            raise TurtleGraphicsError("bad canvas argument %s" % canvas)
        screen = self.screen
        TNavigator.__init__(self, screen.mode())
        TPen.__init__(self)
        screen._turtles.append(self)
        # Canvas item used only for the animated "rubber band" line.
        self.drawingLineItem = screen._createline()
        self.turtle = _TurtleImage(screen, shape)
        self._poly = None                # vertices recorded by begin_poly()
        self._creatingPoly = False
        self._fillitem = self._fillpath = None
        self._shown = visible
        self._hidden_from_screen = False
        self.currentLineItem = screen._createline()
        self.currentLine = [self._position]
        self.items = [self.currentLineItem]
        self.stampItems = []
        self._undobuffersize = undobuffersize
        self.undobuffer = Tbuffer(undobuffersize)
        self._update()
def reset(self):
    """Delete the turtle's drawings and restore its default values.

    No argument.

    Delete the turtle's drawings from the screen, re-center the turtle
    and set variables to the default values.

    Example (for a Turtle instance named turtle):
    >>> turtle.position()
    (0.00,-22.00)
    >>> turtle.heading()
    100.0
    >>> turtle.reset()
    >>> turtle.position()
    (0.00,0.00)
    >>> turtle.heading()
    0.0
    """
    # Reset both base classes first, then wipe own drawings and redraw.
    TNavigator.reset(self)
    TPen._reset(self)
    self._clear()
    self._drawturtle()
    self._update()
def setundobuffer(self, size):
    """Set or disable undobuffer.

    Argument:
    size -- an integer or None

    If size is an integer an empty undobuffer of given size is installed.
    Size gives the maximum number of turtle-actions that can be undone
    by the undo() function.
    If size is None, no undobuffer is present.

    Example (for a Turtle instance named turtle):
    >>> turtle.setundobuffer(42)
    """
    # A missing or non-positive size disables undo support entirely.
    self.undobuffer = Tbuffer(size) if size is not None and size > 0 else None
def undobufferentries(self):
    """Return count of entries in the undobuffer.

    No argument.

    Example (for a Turtle instance named turtle):
    >>> while undobufferentries():
    ...     undo()
    """
    buf = self.undobuffer
    return 0 if buf is None else buf.nr_of_items()
def _clear(self):
    """Delete all of pen's drawings"""
    self._fillitem = self._fillpath = None
    for item in self.items:
        self.screen._delete(item)
    self.currentLineItem = self.screen._createline()
    self.currentLine = []
    if self._drawing:
        # A pen that is down immediately starts a new line here.
        self.currentLine.append(self._position)
    self.items = [self.currentLineItem]
    self.clearstamps()
    # Clearing also discards the undo history.
    self.setundobuffer(self._undobuffersize)
def clear(self):
    """Delete the turtle's drawings from the screen. Do not move turtle.

    No arguments.

    Delete the turtle's drawings from the screen. Do not move turtle.
    State and position of the turtle as well as drawings of other
    turtles are not affected.

    Examples (for a Turtle instance named turtle):
    >>> turtle.clear()
    """
    self._clear()
    self._update()
def _update_data(self):
    """Flush the pending line segment, honouring the tracer setting:
    drawing happens only every n-th update (when the counter hits 0)."""
    self.screen._incrementudc()
    if self.screen._updatecounter != 0:
        return
    if len(self.currentLine)>1:
        self.screen._drawline(self.currentLineItem, self.currentLine,
                              self._pencolor, self._pensize)
def _update(self):
    """Perform a Turtle-data update.
    """
    screen = self.screen
    if screen._tracing == 0:
        # Animation switched off -- draw nothing now.
        return
    elif screen._tracing == 1:
        self._update_data()
        self._drawturtle()
        screen._update()                  # TurtleScreenBase
        screen._delay(screen._delayvalue) # TurtleScreenBase
    else:
        self._update_data()
        if screen._updatecounter == 0:
            # n-th update reached: redraw all turtles at once.
            for t in screen.turtles():
                t._drawturtle()
            screen._update()
def _tracer(self, flag=None, delay=None):
    """Turns turtle animation on/off and set delay for update drawings.

    Optional arguments:
    n -- nonnegative integer
    delay -- nonnegative integer

    If n is given, only each n-th regular screen update is really performed.
    (Can be used to accelerate the drawing of complex graphics.)
    Second arguments sets delay value (see RawTurtle.delay())

    Example (for a Turtle instance named turtle):
    >>> turtle.tracer(8, 25)
    >>> dist = 2
    >>> for i in range(200):
    ...     turtle.fd(dist)
    ...     turtle.rt(90)
    ...     dist += 2
    """
    # Tracing is a screen-wide setting; simply delegate.
    return self.screen.tracer(flag, delay)
def _color(self, args):
    # Color interpretation is screen-dependent (colormode); delegate.
    return self.screen._color(args)
def _colorstr(self, args):
    # Conversion to a Tk color string is screen-dependent; delegate.
    return self.screen._colorstr(args)
def _cc(self, args):
    """Convert colortriples to hexstrings.
    """
    if isinstance(args, str):
        return args          # already a color string
    try:
        red, green, blue = args
    except (TypeError, ValueError):
        raise TurtleGraphicsError("bad color arguments: %s" % str(args))
    if self.screen._colormode == 1.0:
        # Scale 0.0..1.0 components up to the 0..255 byte range.
        red, green, blue = [round(255.0 * c) for c in (red, green, blue)]
    for component in (red, green, blue):
        if not 0 <= component <= 255:
            raise TurtleGraphicsError("bad color sequence: %s" % str(args))
    return "#%02x%02x%02x" % (red, green, blue)
def clone(self):
    """Create and return a clone of the turtle.

    No argument.

    Create and return a clone of the turtle with same position, heading
    and turtle properties.

    Example (for a Turtle instance named mick):
    mick = Turtle()
    joe = mick.clone()
    """
    screen = self.screen
    self._newLine(self._drawing)
    # Temporarily detach the un-copyable Tk objects so deepcopy works.
    turtle = self.turtle
    self.screen = None
    self.turtle = None  # to make self deepcopy-able
    q = deepcopy(self)
    self.screen = screen
    self.turtle = turtle
    # Give the clone its own image and canvas items on the same screen.
    q.screen = screen
    q.turtle = _TurtleImage(screen, self.turtle.shapeIndex)
    screen._turtles.append(q)
    ttype = screen._shapes[self.turtle.shapeIndex]._type
    if ttype == "polygon":
        q.turtle._item = screen._createpoly()
    elif ttype == "image":
        q.turtle._item = screen._createimage(screen._shapes["blank"]._data)
    elif ttype == "compound":
        q.turtle._item = [screen._createpoly() for item in
                          screen._shapes[self.turtle.shapeIndex]._data]
    q.currentLineItem = screen._createline()
    q._update()
    return q
def shape(self, name=None):
    """Set turtle shape to shape with given name / return current shapename.

    Optional argument:
    name -- a string, which is a valid shapename

    Set turtle shape to shape with given name or, if name is not given,
    return name of current shape.
    Shape with name must exist in the TurtleScreen's shape dictionary.
    Initially there are the following polygon shapes:
    'arrow', 'turtle', 'circle', 'square', 'triangle', 'classic'.
    To learn about how to deal with shapes see Screen-method register_shape.

    Example (for a Turtle instance named turtle):
    >>> turtle.shape()
    'arrow'
    >>> turtle.shape("turtle")
    >>> turtle.shape()
    'turtle'
    """
    if name is None:
        return self.turtle.shapeIndex
    # PEP 8 idiom fix: use "name not in" instead of "not name in".
    if name not in self.screen.getshapes():
        raise TurtleGraphicsError("There is no shape named %s" % name)
    self.turtle._setshape(name)
    self._update()
def shapesize(self, stretch_wid=None, stretch_len=None, outline=None):
    """Set/return turtle's stretchfactors/outline. Set resizemode to "user".

    Optional arguments:
    stretch_wid : positive number
    stretch_len : positive number
    outline     : positive number

    Return or set the pen's attributes x/y-stretchfactors and/or outline.
    Set resizemode to "user".
    If and only if resizemode is set to "user", the turtle will be displayed
    stretched according to its stretchfactors:
    stretch_wid is stretchfactor perpendicular to orientation
    stretch_len is stretchfactor in direction of turtles orientation.
    outline determines the width of the shapes's outline.

    Examples (for a Turtle instance named turtle):
    >>> turtle.resizemode("user")
    >>> turtle.shapesize(5, 5, 12)
    >>> turtle.shapesize(outline=8)
    """
    if stretch_wid is stretch_len is outline is None:
        # Query mode: return the current settings.
        stretch_wid, stretch_len = self._stretchfactor
        return stretch_wid, stretch_len, self._outlinewidth
    if stretch_wid == 0 or stretch_len == 0:
        raise TurtleGraphicsError("stretch_wid/stretch_len must not be zero")
    # A single given stretch value is applied to both directions.
    if stretch_wid is not None:
        if stretch_len is None:
            stretchfactor = stretch_wid, stretch_wid
        else:
            stretchfactor = stretch_wid, stretch_len
    elif stretch_len is not None:
        stretchfactor = self._stretchfactor[0], stretch_len
    else:
        stretchfactor = self._stretchfactor
    if outline is None:
        outline = self._outlinewidth
    # pen() records the change in the undobuffer and triggers a redraw.
    self.pen(resizemode="user",
             stretchfactor=stretchfactor, outline=outline)
def shearfactor(self, shear=None):
    """Set or return the current shearfactor.

    Optional argument: shear -- number, tangent of the shear angle

    Shear the turtleshape according to the given shearfactor shear,
    which is the tangent of the shear angle. DO NOT change the
    turtle's heading (direction of movement).
    If shear is not given: return the current shearfactor, i. e. the
    tangent of the shear angle, by which lines parallel to the
    heading of the turtle are sheared.

    Examples (for a Turtle instance named turtle):
    >>> turtle.shape("circle")
    >>> turtle.shapesize(5,2)
    >>> turtle.shearfactor(0.5)
    >>> turtle.shearfactor()
    >>> 0.5
    """
    if shear is not None:
        # Setter path: route through pen() for undo support and redraw.
        self.pen(resizemode="user", shearfactor=shear)
        return None
    return self._shearfactor
def settiltangle(self, angle):
    """Rotate the turtleshape to point in the specified direction

    Argument: angle -- number

    Rotate the turtleshape to point in the direction specified by angle,
    regardless of its current tilt-angle. DO NOT change the turtle's
    heading (direction of movement).

    Examples (for a Turtle instance named turtle):
    >>> turtle.shape("circle")
    >>> turtle.shapesize(5,2)
    >>> turtle.settiltangle(45)
    >>> stamp()
    >>> turtle.fd(50)
    >>> turtle.settiltangle(-45)
    >>> stamp()
    >>> turtle.fd(50)
    """
    # Convert from user angle units (degrees/radians, orientation) to
    # the internal radian tilt, normalized to [0, 2*pi).
    tilt = -angle * self._degreesPerAU * self._angleOrient
    tilt = (tilt * math.pi / 180.0) % (2*math.pi)
    self.pen(resizemode="user", tilt=tilt)
def tiltangle(self, angle=None):
    """Set or return the current tilt-angle.

    Optional argument: angle -- number

    Rotate the turtleshape to point in the direction specified by angle,
    regardless of its current tilt-angle. DO NOT change the turtle's
    heading (direction of movement).
    If angle is not given: return the current tilt-angle, i. e. the angle
    between the orientation of the turtleshape and the heading of the
    turtle (its direction of movement).

    Deprecated since Python 3.1

    Examples (for a Turtle instance named turtle):
    >>> turtle.shape("circle")
    >>> turtle.shapesize(5,2)
    >>> turtle.tilt(45)
    >>> turtle.tiltangle()
    """
    if angle is not None:
        self.settiltangle(angle)
        return None
    # Convert the internal radian tilt back to user angle units.
    tilt = -self._tilt * (180.0/math.pi) * self._angleOrient
    return (tilt / self._degreesPerAU) % self._fullcircle
def tilt(self, angle):
    """Rotate the turtleshape by angle.

    Argument:
    angle - a number

    Rotate the turtleshape by angle from its current tilt-angle,
    but do NOT change the turtle's heading (direction of movement).

    Examples (for a Turtle instance named turtle):
    >>> turtle.shape("circle")
    >>> turtle.shapesize(5,2)
    >>> turtle.tilt(30)
    >>> turtle.fd(50)
    >>> turtle.tilt(30)
    >>> turtle.fd(50)
    """
    # Relative tilt = current absolute tilt plus the increment.
    self.settiltangle(angle + self.tiltangle())
def shapetransform(self, t11=None, t12=None, t21=None, t22=None):
    """Set or return the current transformation matrix of the turtle shape.

    Optional arguments: t11, t12, t21, t22 -- numbers.

    If none of the matrix elements are given, return the transformation
    matrix.
    Otherwise set the given elements and transform the turtleshape
    according to the matrix consisting of first row t11, t12 and
    second row t21, t22.
    Modify stretchfactor, shearfactor and tiltangle according to the
    given matrix.

    Raises TurtleGraphicsError if the resulting matrix is singular.

    Examples (for a Turtle instance named turtle):
    >>> turtle.shape("square")
    >>> turtle.shapesize(4,2)
    >>> turtle.shearfactor(-0.5)
    >>> turtle.shapetransform()
    (4.0, -1.0, -0.0, 2.0)
    """
    if t11 is t12 is t21 is t22 is None:
        return self._shapetrafo
    # Merge the given elements into the current matrix.
    m11, m12, m21, m22 = self._shapetrafo
    if t11 is not None: m11 = t11
    if t12 is not None: m12 = t12
    if t21 is not None: m21 = t21
    if t22 is not None: m22 = t22
    # Bug fix: test the determinant of the MERGED matrix. The old code
    # computed t11*t22 - t12*t21, which raised TypeError whenever only
    # some of the four arguments were supplied (the rest being None).
    if m11 * m22 - m12 * m21 == 0:
        raise TurtleGraphicsError("Bad shape transform matrix: must not be singular")
    self._shapetrafo = (m11, m12, m21, m22)
    # Decompose the matrix into tilt, stretch and shear components.
    alfa = math.atan2(-m21, m11) % (2 * math.pi)
    sa, ca = math.sin(alfa), math.cos(alfa)
    a11, a12, a21, a22 = (ca*m11 - sa*m21, ca*m12 - sa*m22,
                          sa*m11 + ca*m21, sa*m12 + ca*m22)
    self._stretchfactor = a11, a22
    self._shearfactor = a12/a22
    self._tilt = alfa
    self.pen(resizemode="user")
def _polytrafo(self, poly):
    """Computes transformed polygon shapes from a shape
    according to current position and heading.
    """
    screen = self.screen
    p0, p1 = self._position
    e0, e1 = self._orient
    # Normalize the orientation vector in screen (aspect-corrected)
    # coordinates before rotating/translating the polygon points.
    e = Vec2D(e0, e1 * screen.yscale / screen.xscale)
    e0, e1 = (1.0 / abs(e)) * e
    return [(p0+(e1*x+e0*y)/screen.xscale, p1+(-e0*x+e1*y)/screen.yscale)
            for (x, y) in poly]
def get_shapepoly(self):
    """Return the current shape polygon as tuple of coordinate pairs.

    No argument.

    Returns None for image and compound shapes.

    Examples (for a Turtle instance named turtle):
    >>> turtle.shape("square")
    >>> turtle.shapetransform(4, -1, 0, 2)
    >>> turtle.get_shapepoly()
    ((50, -20), (30, 20), (-50, 20), (-30, -20))
    """
    shape = self.screen._shapes[self.turtle.shapeIndex]
    if shape._type == "polygon":
        return self._getshapepoly(shape._data, shape._type == "compound")
    # else return None
def _getshapepoly(self, polygon, compound=False):
    """Return polygon transformed according to resizemode and the
    current shape transform matrix.
    """
    mode = self._resizemode
    if mode == "noresize" and not compound:
        return polygon
    if compound or mode == "user":
        a, b, c, d = self._shapetrafo
    elif mode == "auto":
        # Grow the shape with the pen, but never shrink below 1.
        scale = max(1, self._pensize/5.0)
        a, b, c, d = scale, 0, 0, scale
    return tuple((a*px + b*py, c*px + d*py) for (px, py) in polygon)
def _drawturtle(self):
    """Manages the correct rendering of the turtle with respect to
    its shape, resizemode, stretch and tilt etc."""
    screen = self.screen
    shape = screen._shapes[self.turtle.shapeIndex]
    ttype = shape._type
    titem = self.turtle._item
    if self._shown and screen._updatecounter == 0 and screen._tracing > 0:
        self._hidden_from_screen = False
        tshape = shape._data
        if ttype == "polygon":
            # Outline width depends on resizemode.
            if self._resizemode == "noresize": w = 1
            elif self._resizemode == "auto": w = self._pensize
            else: w =self._outlinewidth
            shape = self._polytrafo(self._getshapepoly(tshape))
            fc, oc = self._fillcolor, self._pencolor
            screen._drawpoly(titem, shape, fill=fc, outline=oc,
                             width=w, top=True)
        elif ttype == "image":
            screen._drawimage(titem, self._position, tshape)
        elif ttype == "compound":
            # Compound shapes carry their own fill/outline colors.
            for item, (poly, fc, oc) in zip(titem, tshape):
                poly = self._polytrafo(self._getshapepoly(poly, True))
                screen._drawpoly(item, poly, fill=self._cc(fc),
                                 outline=self._cc(oc), width=self._outlinewidth, top=True)
    else:
        # Hide the turtle by collapsing its items to a degenerate shape.
        if self._hidden_from_screen:
            return
        if ttype == "polygon":
            screen._drawpoly(titem, ((0, 0), (0, 0), (0, 0)), "", "")
        elif ttype == "image":
            screen._drawimage(titem, self._position,
                              screen._shapes["blank"]._data)
        elif ttype == "compound":
            for item in titem:
                screen._drawpoly(item, ((0, 0), (0, 0), (0, 0)), "", "")
        self._hidden_from_screen = True
def stamp(self):
    """Stamp a copy of the turtleshape onto the canvas and return its id.

    No argument.

    Stamp a copy of the turtle shape onto the canvas at the current
    turtle position. Return a stamp_id for that stamp, which can be
    used to delete it by calling clearstamp(stamp_id).

    Example (for a Turtle instance named turtle):
    >>> turtle.color("blue")
    >>> turtle.stamp()
    13
    >>> turtle.fd(50)
    """
    screen = self.screen
    shape = screen._shapes[self.turtle.shapeIndex]
    ttype = shape._type
    tshape = shape._data
    if ttype == "polygon":
        stitem = screen._createpoly()
        # Outline width depends on resizemode (same rule as _drawturtle).
        if self._resizemode == "noresize": w = 1
        elif self._resizemode == "auto": w = self._pensize
        else: w =self._outlinewidth
        shape = self._polytrafo(self._getshapepoly(tshape))
        fc, oc = self._fillcolor, self._pencolor
        screen._drawpoly(stitem, shape, fill=fc, outline=oc,
                         width=w, top=True)
    elif ttype == "image":
        stitem = screen._createimage("")
        screen._drawimage(stitem, self._position, tshape)
    elif ttype == "compound":
        # One canvas item per component; the tuple of items is the id.
        stitem = tuple(screen._createpoly() for element in tshape)
        for item, (poly, fc, oc) in zip(stitem, tshape):
            poly = self._polytrafo(self._getshapepoly(poly, True))
            screen._drawpoly(item, poly, fill=self._cc(fc),
                             outline=self._cc(oc), width=self._outlinewidth, top=True)
    self.stampItems.append(stitem)
    # Robustness fix: undo may be disabled via setundobuffer(None); the
    # old unconditional push crashed with AttributeError in that case.
    if self.undobuffer:
        self.undobuffer.push(("stamp", stitem))
    return stitem
def _clearstamp(self, stampid):
    """Does the work for clearstamp() and clearstamps().

    Argument:
    stampid -- a stamp id as returned by stamp(): a single canvas item
    or, for compound shapes, a tuple of items.

    Deletes the stamp's canvas item(s) and removes the matching "stamp"
    entry from the undobuffer so the deletion cannot be undone.
    """
    if stampid in self.stampItems:
        if isinstance(stampid, tuple):
            for subitem in stampid:
                self.screen._delete(subitem)
        else:
            self.screen._delete(stampid)
        self.stampItems.remove(stampid)
    # Delete stampitem from undobuffer if necessary
    # if clearstamp is called directly.
    buf = self.undobuffer
    if buf is None:
        # Robustness fix: undo may be disabled via setundobuffer(None);
        # the old code crashed with AttributeError on buf.buffer.
        return
    item = ("stamp", stampid)
    if item not in buf.buffer:
        return
    index = buf.buffer.index(item)
    buf.buffer.remove(item)
    # Keep the ring-buffer pointer consistent after the removal.
    if index <= buf.ptr:
        buf.ptr = (buf.ptr - 1) % buf.bufsize
    buf.buffer.insert((buf.ptr+1)%buf.bufsize, [None])
def clearstamp(self, stampid):
    """Delete stamp with given stampid

    Argument:
    stampid - an integer, must be return value of previous stamp() call.

    Example (for a Turtle instance named turtle):
    >>> turtle.color("blue")
    >>> astamp = turtle.stamp()
    >>> turtle.fd(50)
    >>> turtle.clearstamp(astamp)
    """
    self._clearstamp(stampid)
    self._update()
def clearstamps(self, n=None):
    """Delete all or first/last n of turtle's stamps.

    Optional argument:
    n -- an integer

    If n is None, delete all of pen's stamps,
    else if n > 0 delete first n stamps
    else if n < 0 delete last n stamps.

    Example (for a Turtle instance named turtle):
    >>> for i in range(8):
    ...     turtle.stamp(); turtle.fd(30)
    ...
    >>> turtle.clearstamps(2)
    >>> turtle.clearstamps(-2)
    >>> turtle.clearstamps()
    """
    if n is None:
        victims = self.stampItems[:]
    else:
        # Positive n: oldest n stamps; negative n: newest |n| stamps.
        victims = self.stampItems[:n] if n >= 0 else self.stampItems[n:]
    for stampid in victims:
        self._clearstamp(stampid)
    self._update()
def _goto(self, end):
    """Move the pen to the point end, thereby drawing a line
    if pen is down. All other methods for turtle movement depend
    on this one.
    """
    ## Version with undo-stuff
    # Snapshot everything needed to reverse this move later.
    go_modes = ( self._drawing,
                 self._pencolor,
                 self._pensize,
                 isinstance(self._fillpath, list))
    screen = self.screen
    undo_entry = ("go", self._position, end, go_modes,
                  (self.currentLineItem,
                  self.currentLine[:],
                  screen._pointlist(self.currentLineItem),
                  self.items[:])
                  )
    if self.undobuffer:
        self.undobuffer.push(undo_entry)
    start = self._position
    if self._speed and screen._tracing == 1:
        # Animate the move in nhops intermediate steps; the step count
        # shrinks as speed grows.
        diff = (end-start)
        diffsq = (diff[0]*screen.xscale)**2 + (diff[1]*screen.yscale)**2
        nhops = 1+int((diffsq**0.5)/(3*(1.1**self._speed)*self._speed))
        delta = diff * (1.0/nhops)
        for n in range(1, nhops):
            if n == 1:
                top = True
            else:
                top = False
            self._position = start + delta * n
            if self._drawing:
                screen._drawline(self.drawingLineItem,
                                 (start, self._position),
                                 self._pencolor, self._pensize, top)
            self._update()
        if self._drawing:
            # Collapse the temporary animation line.
            screen._drawline(self.drawingLineItem, ((0, 0), (0, 0)),
                             fill="", width=self._pensize)
    # Turtle now at end,
    if self._drawing: # now update currentLine
        self.currentLine.append(end)
    if isinstance(self._fillpath, list):
        self._fillpath.append(end)
    ###### inheritance!!!!!!!!!!!!!!!!!!!!!!
    self._position = end
    if self._creatingPoly:
        self._poly.append(end)
    if len(self.currentLine) > 42: # 42! answer to the ultimate question
                                   # of life, the universe and everything
        # Long polylines slow down _drawline; split them regularly.
        self._newLine()
    self._update() #count=True)
def _undogoto(self, entry):
    """Reverse a _goto. Used for undo()
    """
    old, new, go_modes, coodata = entry
    drawing, pc, ps, filling = go_modes
    cLI, cL, pl, items = coodata
    screen = self.screen
    # Sanity check: the undo entry must match the current position.
    if abs(self._position - new) > 0.5:
        print ("undogoto: HALLO-DA-STIMMT-WAS-NICHT!")
    # restore former situation
    self.currentLineItem = cLI
    self.currentLine = cL
    if pl == [(0, 0), (0, 0)]:
        usepc = ""
    else:
        usepc = pc
    screen._drawline(cLI, pl, fill=usepc, width=ps)
    # Remove line items created after the snapshot.
    todelete = [i for i in self.items if (i not in items) and
                (screen._type(i) == "line")]
    for i in todelete:
        screen._delete(i)
        self.items.remove(i)
    start = old
    if self._speed and screen._tracing == 1:
        # Animate the way back, mirroring the forward animation.
        diff = old - new
        diffsq = (diff[0]*screen.xscale)**2 + (diff[1]*screen.yscale)**2
        nhops = 1+int((diffsq**0.5)/(3*(1.1**self._speed)*self._speed))
        delta = diff * (1.0/nhops)
        for n in range(1, nhops):
            if n == 1:
                top = True
            else:
                top = False
            self._position = new + delta * n
            if drawing:
                screen._drawline(self.drawingLineItem,
                                 (start, self._position),
                                 pc, ps, top)
            self._update()
        if drawing:
            screen._drawline(self.drawingLineItem, ((0, 0), (0, 0)),
                             fill="", width=ps)
    # Turtle now at position old,
    self._position = old
    ## if undo is done during creating a polygon, the last vertex
    ## will be deleted. if the polygon is entirely deleted,
    ## creatingPoly will be set to False.
    ## Polygons created before the last one will not be affected by undo()
    if self._creatingPoly:
        if len(self._poly) > 0:
            self._poly.pop()
        if self._poly == []:
            self._creatingPoly = False
            self._poly = None
    if filling:
        if self._fillpath == []:
            # Should not happen ("unlikely", as the message says).
            self._fillpath = None
            print("Unwahrscheinlich in _undogoto!")
        elif self._fillpath is not None:
            self._fillpath.pop()
    self._update() #count=True)
def _rotate(self, angle):
    """Turns pen clockwise by angle.
    """
    if self.undobuffer:
        self.undobuffer.push(("rot", angle, self._degreesPerAU))
    angle *= self._degreesPerAU
    neworient = self._orient.rotate(angle)
    tracing = self.screen._tracing
    if tracing == 1 and self._speed > 0:
        # Animate the rotation in small steps proportional to speed.
        anglevel = 3.0 * self._speed
        steps = 1 + int(abs(angle)/anglevel)
        delta = 1.0*angle/steps
        for _ in range(steps):
            self._orient = self._orient.rotate(delta)
            self._update()
    # Assign the exact final orientation to avoid rounding drift.
    self._orient = neworient
    self._update()
def _newLine(self, usePos=True):
    """Closes current line item and starts a new one.
    Remark: if current line became too long, animation
    performance (via _drawline) slowed down considerably.
    """
    if len(self.currentLine) > 1:
        # Freeze the finished polyline and start a fresh canvas item.
        self.screen._drawline(self.currentLineItem, self.currentLine,
                              self._pencolor, self._pensize)
        self.currentLineItem = self.screen._createline()
        self.items.append(self.currentLineItem)
    else:
        self.screen._drawline(self.currentLineItem, top=True)
        self.currentLine = []
    if usePos:
        self.currentLine = [self._position]
def filling(self):
    """Return fillstate (True if filling, False else).

    No argument.

    Example (for a Turtle instance named turtle):
    >>> turtle.begin_fill()
    >>> if turtle.filling():
    ...     turtle.pensize(5)
    ... else:
    ...     turtle.pensize(3)
    """
    # _fillpath is a list exactly while begin_fill() is active.
    return isinstance(self._fillpath, list)
def begin_fill(self):
    """Called just before drawing a shape to be filled.

    No argument.

    Example (for a Turtle instance named turtle):
    >>> turtle.color("black", "red")
    >>> turtle.begin_fill()
    >>> turtle.circle(60)
    >>> turtle.end_fill()
    """
    if not self.filling():
        self._fillitem = self.screen._createpoly()
        self.items.append(self._fillitem)
    # (Re)start recording the fill polygon from the current position.
    self._fillpath = [self._position]
    self._newLine()
    if self.undobuffer:
        self.undobuffer.push(("beginfill", self._fillitem))
    self._update()
def end_fill(self):
    """Fill the shape drawn after the call begin_fill().

    No argument.

    Example (for a Turtle instance named turtle):
    >>> turtle.color("black", "red")
    >>> turtle.begin_fill()
    >>> turtle.circle(60)
    >>> turtle.end_fill()
    """
    if self.filling():
        # A polygon needs at least three vertices to be drawable.
        if len(self._fillpath) > 2:
            self.screen._drawpoly(self._fillitem, self._fillpath,
                                  fill=self._fillcolor)
            if self.undobuffer:
                self.undobuffer.push(("dofill", self._fillitem))
        self._fillitem = self._fillpath = None
        self._update()
def dot(self, size=None, *color):
    """Draw a dot with diameter size, using color.

    Optional arguments:
    size -- an integer >= 1 (if given)
    color -- a colorstring or a numeric color tuple

    Draw a circular dot with diameter size, using color.
    If size is not given, the maximum of pensize+4 and 2*pensize is used.

    Example (for a Turtle instance named turtle):
    >>> turtle.dot()
    >>> turtle.fd(50); turtle.dot(20, "blue"); turtle.fd(50)
    """
    if not color:
        # Only one positional argument: it may actually be the color.
        if isinstance(size, (str, tuple)):
            color = self._colorstr(size)
            size = self._pensize + max(self._pensize, 4)
        else:
            color = self._pencolor
            if not size:
                size = self._pensize + max(self._pensize, 4)
    else:
        if size is None:
            size = self._pensize + max(self._pensize, 4)
        color = self._colorstr(color)
    if hasattr(self.screen, "_dot"):
        # Fast path: the screen can draw dots natively.
        item = self.screen._dot(self._position, size, color)
        self.items.append(item)
        if self.undobuffer:
            self.undobuffer.push(("dot", item))
    else:
        # Fallback: emulate the dot with a zero-length fat line; the
        # whole sequence is recorded as one cumulative undo entry.
        pen = self.pen()
        if self.undobuffer:
            self.undobuffer.push(["seq"])
            self.undobuffer.cumulate = True
        try:
            if self.resizemode() == 'auto':
                self.ht()
            self.pendown()
            self.pensize(size)
            self.pencolor(color)
            self.forward(0)
        finally:
            self.pen(pen)
        if self.undobuffer:
            self.undobuffer.cumulate = False
def _write(self, txt, align, font):
    """Performs the writing for write()
    """
    # Returns the x-coordinate of the right edge of the written text.
    item, end = self.screen._write(self._position, txt, align, font,
                                   self._pencolor)
    self.items.append(item)
    if self.undobuffer:
        self.undobuffer.push(("wri", item))
    return end
def write(self, arg, move=False, align="left", font=("Arial", 8, "normal")):
    """Write text at the current turtle position.

    Arguments:
    arg -- info, which is to be written to the TurtleScreen
    move (optional) -- True/False
    align (optional) -- one of the strings "left", "center" or right"
    font (optional) -- a triple (fontname, fontsize, fonttype)

    Write text - the string representation of arg - at the current
    turtle position according to align ("left", "center" or right")
    and with the given font.
    If move is True, the pen is moved to the bottom-right corner
    of the text. By default, move is False.

    Example (for a Turtle instance named turtle):
    >>> turtle.write('Home = ', True, align="center")
    >>> turtle.write((0,0), True)
    """
    # Record writing plus optional move as one cumulative undo entry.
    if self.undobuffer:
        self.undobuffer.push(["seq"])
        self.undobuffer.cumulate = True
    end = self._write(str(arg), align.lower(), font)
    if move:
        x, y = self.pos()
        self.setpos(end, y)
    if self.undobuffer:
        self.undobuffer.cumulate = False
def begin_poly(self):
    """Start recording the vertices of a polygon.

    No argument.

    Start recording the vertices of a polygon. Current turtle position
    is first point of polygon.

    Example (for a Turtle instance named turtle):
    >>> turtle.begin_poly()
    """
    self._poly = [self._position]
    self._creatingPoly = True
def end_poly(self):
    """Stop recording the vertices of a polygon.

    No argument.

    Stop recording the vertices of a polygon. Current turtle position is
    last point of polygon. This will be connected with the first point.

    Example (for a Turtle instance named turtle):
    >>> turtle.end_poly()
    """
    self._creatingPoly = False
def get_poly(self):
    """Return the lastly recorded polygon.

    No argument.

    Returns None if no polygon has been recorded yet.

    Example (for a Turtle instance named turtle):
    >>> p = turtle.get_poly()
    >>> turtle.register_shape("myFavouriteShape", p)
    """
    ## check if there is any poly?
    if self._poly is not None:
        return tuple(self._poly)
def getscreen(self):
    """Return the TurtleScreen object, the turtle is drawing  on.

    No argument.

    Return the TurtleScreen object, the turtle is drawing  on.
    So TurtleScreen-methods can be called for that object.

    Example (for a Turtle instance named turtle):
    >>> ts = turtle.getscreen()
    >>> ts
    <turtle.TurtleScreen object at 0x0106B770>
    >>> ts.bgcolor("pink")
    """
    return self.screen
def getturtle(self):
    """Return the Turtleobject itself.

    No argument.

    Only reasonable use: as a function to return the 'anonymous turtle':

    Example:
    >>> pet = getturtle()
    >>> pet.fd(50)
    >>> pet
    <turtle.Turtle object at 0x0187D810>
    >>> turtles()
    [<turtle.Turtle object at 0x0187D810>]
    """
    return self

getpen = getturtle  # alias


################################################################
### screen oriented methods recurring to methods of TurtleScreen
################################################################
def _delay(self, delay=None):
    """Set delay value which determines speed of turtle animation.
    """
    # Delay is a screen-wide setting; delegate.
    return self.screen.delay(delay)
def onclick(self, fun, btn=1, add=None):
    """Bind fun to mouse-click event on this turtle on canvas.

    Arguments:
    fun --  a function with two arguments, to which will be assigned
            the coordinates of the clicked point on the canvas.
    num --  number of the mouse-button defaults to 1 (left mouse button).
    add --  True or False. If True, new binding will be added, otherwise
            it will replace a former binding.

    Example for the anonymous turtle, i. e. the procedural way:

    >>> def turn(x, y):
    ...     left(360)
    ...
    >>> onclick(turn)  # Now clicking into the turtle will turn it.
    >>> onclick(None)  # event-binding will be removed
    """
    # Bind to the turtle's own canvas item(s), not the whole screen.
    self.screen._onclick(self.turtle._item, fun, btn, add)
    self._update()
def onrelease(self, fun, btn=1, add=None):
    """Bind fun to mouse-button-release event on this turtle on canvas.

    Arguments:
    fun -- a function with two arguments, to which will be assigned
            the coordinates of the clicked point on the canvas.
    num --  number of the mouse-button defaults to 1 (left mouse button).

    Example (for a MyTurtle instance named joe):
    >>> class MyTurtle(Turtle):
    ...     def glow(self,x,y):
    ...             self.fillcolor("red")
    ...     def unglow(self,x,y):
    ...             self.fillcolor("")
    ...
    >>> joe = MyTurtle()
    >>> joe.onclick(joe.glow)
    >>> joe.onrelease(joe.unglow)

    Clicking on joe turns fillcolor red, unclicking turns it to
    transparent.
    """
    # Bind to the turtle's own canvas item(s), not the whole screen.
    self.screen._onrelease(self.turtle._item, fun, btn, add)
    self._update()
def ondrag(self, fun, btn=1, add=None):
    """Bind fun to mouse-move event on this turtle on canvas.

    Arguments:
    fun -- a function with two arguments, to which will be assigned
           the coordinates of the clicked point on the canvas.
    num -- number of the mouse-button defaults to 1 (left mouse button).

    Every sequence of mouse-move-events on a turtle is preceded by a
    mouse-click event on that turtle.

    Example (for a Turtle instance named turtle):
    >>> turtle.ondrag(turtle.goto)

    Subsequently clicking and dragging a Turtle will move it
    across the screen thereby producing handdrawings (if pen is
    down).
    """
    self.screen._ondrag(self.turtle._item, fun, btn, add)
def _undo(self, action, data):
    """Does the main part of the work for undo()
    """
    if self.undobuffer is None:
        return
    if action == "rot":
        # Rotate back; a compensating "rot" entry is pushed by
        # _rotate, so pop it again immediately.
        angle, degPAU = data
        self._rotate(-angle*degPAU/self._degreesPerAU)
        dummy = self.undobuffer.pop()
    elif action == "stamp":
        stitem = data[0]
        self.clearstamp(stitem)
    elif action == "go":
        self._undogoto(data)
    elif action in ["wri", "dot"]:
        # Remove the written text / dot canvas item.
        item = data[0]
        self.screen._delete(item)
        self.items.remove(item)
    elif action == "dofill":
        # Collapse the fill polygon to an invisible degenerate one.
        item = data[0]
        self.screen._drawpoly(item, ((0, 0),(0, 0),(0, 0)),
                              fill="", outline="")
    elif action == "beginfill":
        item = data[0]
        self._fillitem = self._fillpath = None
        if item in self.items:
            self.screen._delete(item)
            self.items.remove(item)
    elif action == "pen":
        # Restore previous pen state; pen() pushes a compensating
        # entry, so pop it again.
        TPen.pen(self, data[0])
        self.undobuffer.pop()
def undo(self):
    """undo (repeatedly) the last turtle action.

    No argument.

    undo (repeatedly) the last turtle action.
    Number of available undo actions is determined by the size of
    the undobuffer.

    Example (for a Turtle instance named turtle):
    >>> for i in range(4):
    ...     turtle.fd(50); turtle.lt(80)
    ...
    >>> for i in range(8):
    ...     turtle.undo()
    ...
    """
    if self.undobuffer is None:
        return
    action, *data = self.undobuffer.pop()
    if action != "seq":
        self._undo(action, data)
        return
    # A "seq" entry bundles several sub-actions (e.g. write + move);
    # undo them in reverse order of execution.
    while data:
        entry = data.pop()
        self._undo(entry[0], entry[1:])
turtlesize = shapesize  # backward-compatibility alias

RawPen = RawTurtle  # backward-compatibility alias for the old class name
def Screen():
    """Return the singleton screen object.

    If none exists at the moment, create a new one and return it,
    else return the existing one."""
    screen = Turtle._screen
    if screen is None:
        screen = Turtle._screen = _Screen()
    return screen
class _Screen(TurtleScreen):
    # Tk resources are class-level singletons: only one root window and
    # one canvas ever exist, shared by every _Screen constructed.
    _root = None
    _canvas = None
    _title = _CFG["title"]

    def __init__(self):
        # XXX there is no need for this code to be conditional,
        # as there will be only a single _Screen instance, anyway
        # XXX actually, the turtle demo is injecting root window,
        # so perhaps the conditional creation of a root should be
        # preserved (perhaps by passing it as an optional parameter)
        if _Screen._root is None:
            _Screen._root = self._root = _Root()
            self._root.title(_Screen._title)
            self._root.ondestroy(self._destroy)
        if _Screen._canvas is None:
            # First construction: size the canvas/window from the config
            # dictionary, then hand the canvas to the TurtleScreen base.
            width = _CFG["width"]
            height = _CFG["height"]
            canvwidth = _CFG["canvwidth"]
            canvheight = _CFG["canvheight"]
            leftright = _CFG["leftright"]
            topbottom = _CFG["topbottom"]
            self._root.setupcanvas(width, height, canvwidth, canvheight)
            _Screen._canvas = self._root._getcanvas()
            TurtleScreen.__init__(self, _Screen._canvas)
            self.setup(width, height, leftright, topbottom)

    def setup(self, width=_CFG["width"], height=_CFG["height"],
              startx=_CFG["leftright"], starty=_CFG["topbottom"]):
        """ Set the size and position of the main window.

        Arguments:
        width: as integer a size in pixels, as float a fraction of the screen.
          Default is 50% of screen.
        height: as integer the height in pixels, as float a fraction of the
          screen. Default is 75% of screen.
        startx: if positive, starting position in pixels from the left
          edge of the screen, if negative from the right edge
          Default, startx=None is to center window horizontally.
        starty: if positive, starting position in pixels from the top
          edge of the screen, if negative from the bottom edge
          Default, starty=None is to center window vertically.

        Examples (for a Screen instance named screen):
        >>> screen.setup (width=200, height=200, startx=0, starty=0)

        sets window to 200x200 pixels, in upper left of screen

        >>> screen.setup(width=.75, height=0.5, startx=None, starty=None)

        sets window to 75% of screen by 50% of screen and centers
        """
        # The injected root (e.g. from the turtle demo) may not support
        # geometry changes; silently do nothing in that case.
        if not hasattr(self._root, "set_geometry"):
            return
        sw = self._root.win_width()
        sh = self._root.win_height()
        # Floats in [0, 1] are interpreted as fractions of the screen size.
        if isinstance(width, float) and 0 <= width <= 1:
            width = sw*width
        if startx is None:
            startx = (sw - width) / 2
        if isinstance(height, float) and 0 <= height <= 1:
            height = sh*height
        if starty is None:
            starty = (sh - height) / 2
        self._root.set_geometry(width, height, startx, starty)
        self.update()

    def title(self, titlestring):
        """Set title of turtle-window

        Argument:
        titlestring -- a string, to appear in the titlebar of the
                       turtle graphics window.

        This is a method of Screen-class. Not available for TurtleScreen-
        objects.

        Example (for a Screen instance named screen):
        >>> screen.title("Welcome to the turtle-zoo!")
        """
        # Remember the title even if the root does not exist yet; it is
        # applied when the root window is created.
        if _Screen._root is not None:
            _Screen._root.title(titlestring)
        _Screen._title = titlestring

    def _destroy(self):
        """Tear down the singleton Tk state and destroy the root window."""
        root = self._root
        if root is _Screen._root:
            # Reset all shared singletons so a fresh screen can be created.
            Turtle._pen = None
            Turtle._screen = None
            _Screen._root = None
            _Screen._canvas = None
        TurtleScreen._RUNNING = False
        root.destroy()

    def bye(self):
        """Shut the turtlegraphics window.

        Example (for a TurtleScreen instance named screen):
        >>> screen.bye()
        """
        self._destroy()

    def exitonclick(self):
        """Go into mainloop until the mouse is clicked.

        No arguments.

        Bind bye() method to mouseclick on TurtleScreen.
        If "using_IDLE" - value in configuration dictionary is False
        (default value), enter mainloop.
        If IDLE with -n switch (no subprocess) is used, this value should be
        set to True in turtle.cfg. In this case IDLE's mainloop
        is active also for the client script.

        This is a method of the Screen-class and not available for
        TurtleScreen instances.

        Example (for a Screen instance named screen):
        >>> screen.exitonclick()

        """
        def exitGracefully(x, y):
            """Screen.bye() with two dummy-parameters"""
            self.bye()
        self.onclick(exitGracefully)
        if _CFG["using_IDLE"]:
            # IDLE's own mainloop is already running; don't start another.
            return
        try:
            mainloop()
        except AttributeError:
            exit(0)
class Turtle(RawTurtle):
    """RawTurtle auto-creating (scrolled) canvas.

    When a Turtle object is created or a function derived from some
    Turtle method is called a TurtleScreen object is automatically created.
    """
    # Singletons shared with the module-level functional interface.
    _pen = None
    _screen = None

    def __init__(self,
                 shape=_CFG["shape"],
                 undobuffersize=_CFG["undobuffersize"],
                 visible=_CFG["visible"]):
        if Turtle._screen is None:
            # Lazily create the singleton screen on first construction.
            Turtle._screen = Screen()
        RawTurtle.__init__(self, Turtle._screen,
                           shape=shape,
                           undobuffersize=undobuffersize,
                           visible=visible)

# Backward-compatible alias.
Pen = Turtle
def write_docstringdict(filename="turtle_docstringdict"):
    """Create and write docstring-dictionary to file.

    Optional argument:
    filename -- a string, used as filename
                default value is turtle_docstringdict

    Has to be called explicitly, (not used by the turtle-graphics classes)
    The docstring dictionary will be written to the Python script <filename>.py
    It is intended to serve as a template for translation of the docstrings
    into different languages.
    """
    # Collect the docstrings of all public screen and turtle functions.
    docsdict = {}
    for methodname in _tg_screen_functions:
        key = "_Screen."+methodname
        docsdict[key] = eval(key).__doc__
    for methodname in _tg_turtle_functions:
        key = "Turtle."+methodname
        docsdict[key] = eval(key).__doc__

    with open("%s.py" % filename, "w") as f:
        # Aliases share their docstring with the primary name; skip them.
        keys = sorted(x for x in docsdict
                      if x.split('.')[1] not in _alias_list)
        f.write('docsdict = {\n\n')
        for key in keys[:-1]:
            f.write('%s :\n' % repr(key))
            f.write('        """%s\n""",\n\n' % docsdict[key])
        # Last entry has no trailing comma.
        key = keys[-1]
        f.write('%s :\n' % repr(key))
        f.write('        """%s\n"""\n\n' % docsdict[key])
        f.write("}\n")
    # Fix: removed the redundant f.close() — the with-statement already
    # closes the file.
def read_docstrings(lang):
    """Read in docstrings from lang-specific docstring dictionary.

    Transfer docstrings, translated to lang, from a dictionary-file
    to the methods of classes Screen and Turtle and - in revised form -
    to the corresponding functions.
    """
    modname = "turtle_docstringdict_%(language)s" % {'language':lang.lower()}
    docsdict = __import__(modname).docsdict
    for key, doc in docsdict.items():
        try:
            # key is e.g. "Turtle.forward"; resolve it and patch the doc.
            eval(key).__doc__ = doc
        except Exception:
            print("Bad docstring-entry: %s" % key)
# At import time, swap in translated docstrings when a non-english
# language is configured in the turtle configuration.
_LANGUAGE = _CFG["language"]

try:
    if _LANGUAGE != "english":
        read_docstrings(_LANGUAGE)
except ImportError:
    print("Cannot find docsdict for", _LANGUAGE)
except Exception:
    print ("Unknown Error when trying to import %s-docstring-dictionary" %
           _LANGUAGE)
def getmethparlist(ob):
    """Get strings describing the arguments for the given object

    Returns a pair of strings representing function parameter lists
    including parenthesis.  The first string is suitable for use in
    function definition and the second is suitable for use in function
    call.  The "self" parameter is not included.
    """
    args, varargs, varkw = inspect.getargs(ob.__code__)
    named = args[1:]                      # drop "self"
    # Render defaults as "=<repr>", right-aligned against the named args.
    defaults = ob.__defaults__ or ()
    rendered = [""] * (len(named) - len(defaults)) + \
               ["=%r" % (value,) for value in defaults]
    def_parts = [name + dflt for name, dflt in zip(named, rendered)]
    call_parts = list(named)
    if varargs is not None:
        def_parts.append("*" + varargs)
        call_parts.append("*" + varargs)
    if varkw is not None:
        def_parts.append("**" + varkw)
        call_parts.append("**" + varkw)
    return "(%s)" % ", ".join(def_parts), "(%s)" % ", ".join(call_parts)
def _turtle_docrevise(docstr):
"""To reduce docstrings from RawTurtle class for functions
"""
import re
if docstr is None:
return None
turtlename = _CFG["exampleturtle"]
newdocstr = docstr.replace("%s." % turtlename,"")
parexp = re.compile(r' \(.+ %s\):' % turtlename)
newdocstr = parexp.sub(":", newdocstr)
return newdocstr
def _screen_docrevise(docstr):
"""To reduce docstrings from TurtleScreen class for functions
"""
import re
if docstr is None:
return None
screenname = _CFG["examplescreen"]
newdocstr = docstr.replace("%s." % screenname,"")
parexp = re.compile(r' \(.+ %s\):' % screenname)
newdocstr = parexp.sub(":", newdocstr)
return newdocstr
__func_body = """\
def {name}{paramslist}:
if {obj} is None:
if not TurtleScreen._RUNNING:
TurtleScreen._RUNNING = True
raise Terminator
{obj} = {init}
try:
return {obj}.{name}{argslist}
except TK.TclError:
if not TurtleScreen._RUNNING:
TurtleScreen._RUNNING = True
raise Terminator
raise
"""
def _make_global_funcs(functions, cls, obj, init, docrevise):
    """Create module-level functions delegating to methods of cls.

    Arguments:
    functions -- iterable of method names to expose
    cls -- class whose methods are wrapped
    obj -- source-code expression naming the singleton instance
    init -- source-code expression creating the singleton
    docrevise -- callable revising the method docstring for function use
    """
    for methodname in functions:
        method = getattr(cls, methodname)
        pl1, pl2 = getmethparlist(method)
        if pl1 == "":
            # Could not derive a parameter list; print a diagnostic and skip.
            print(">>>>>>", pl1, pl2)
            continue
        defstr = __func_body.format(obj=obj, init=init, name=methodname,
                                    paramslist=pl1, argslist=pl2)
        # Define the generated function directly in this module's namespace.
        exec(defstr, globals())
        globals()[methodname].__doc__ = docrevise(method.__doc__)
# Build the module-level functional interface: one global function per
# public _Screen / Turtle method, delegating to the singleton instances.
_make_global_funcs(_tg_screen_functions, _Screen,
                   'Turtle._screen', 'Screen()', _screen_docrevise)
_make_global_funcs(_tg_turtle_functions, Turtle,
                   'Turtle._pen', 'Turtle()', _turtle_docrevise)

# Alias for mainloop().
done = mainloop
if __name__ == "__main__":
    def switchpen():
        # Toggle the pen state of the anonymous turtle: up if it is
        # currently down, down otherwise.
        if isdown():
            pu()
        else:
            pd()
    def demo1():
        """Demo of old turtle.py - module"""
        reset()
        tracer(True)
        up()
        backward(100)
        down()
        # draw 3 squares; the last filled
        width(3)
        for i in range(3):
            if i == 2:
                begin_fill()
            for _ in range(4):
                forward(20)
                left(90)
            if i == 2:
                color("maroon")
                end_fill()
            up()
            forward(30)
            down()
        width(1)
        color("black")
        # move out of the way
        tracer(False)
        up()
        right(90)
        forward(100)
        right(90)
        forward(100)
        right(180)
        down()
        # some text
        write("startstart", 1)
        write("start", 1)
        color("red")
        # staircase
        for i in range(5):
            forward(20)
            left(90)
            forward(20)
            right(90)
        # filled staircase
        tracer(True)
        begin_fill()
        for i in range(5):
            forward(20)
            left(90)
            forward(20)
            right(90)
        end_fill()
        # more text
    def demo2():
        """Demo of some new features."""
        speed(1)
        st()
        pensize(3)
        setheading(towards(0, 0))
        radius = distance(0, 0)/2.0
        rt(90)
        # spiral in towards the origin, toggling the pen on each segment
        for _ in range(18):
            switchpen()
            circle(radius, 10)
        write("wait a moment...")
        # unwind everything done so far via the undo buffer
        while undobufferentries():
            undo()
        reset()
        lt(90)
        colormode(255)
        laenge = 10
        pencolor("green")
        pensize(3)
        lt(180)
        # growing filled triangles with a red/blue color gradient
        for i in range(-2, 16):
            if i > 0:
                begin_fill()
                fillcolor(255-15*i, 0, 15*i)
            for _ in range(3):
                fd(laenge)
                lt(120)
            end_fill()
            laenge += 10
            lt(15)
            speed((speed()+1)%12)
        #end_fill()
        lt(120)
        pu()
        fd(70)
        rt(30)
        pd()
        color("red","yellow")
        speed(0)
        # four-petal filled figure
        begin_fill()
        for _ in range(4):
            circle(50, 90)
            rt(90)
            fd(30)
            rt(90)
        end_fill()
        lt(90)
        pu()
        fd(30)
        pd()
        shape("turtle")

        # chase scene: tri pursues a second turtle across the screen
        tri = getturtle()
        tri.resizemode("auto")
        turtle = Turtle()
        turtle.resizemode("auto")
        turtle.shape("turtle")
        turtle.reset()
        turtle.left(90)
        turtle.speed(0)
        turtle.up()
        turtle.goto(280, 40)
        turtle.lt(30)
        turtle.down()
        turtle.speed(6)
        turtle.color("blue","orange")
        turtle.pensize(2)
        tri.speed(6)
        setheading(towards(turtle))
        count = 1
        while tri.distance(turtle) > 4:
            turtle.fd(3.5)
            turtle.lt(0.6)
            tri.setheading(tri.towards(turtle))
            tri.fd(4)
            if count % 20 == 0:
                turtle.stamp()
                tri.stamp()
                switchpen()
            count += 1
        tri.write("CAUGHT! ", font=("Arial", 16, "bold"), align="right")
        tri.pencolor("black")
        tri.pencolor("red")

        def baba(xdummy, ydummy):
            # click handler: clear the screen and close the window
            clearscreen()
            bye()

        time.sleep(2)

        # replay the chase backwards via the undo buffers
        while undobufferentries():
            tri.undo()
            turtle.undo()
        tri.fd(50)
        tri.write(" Click me!", font = ("Courier", 12, "bold") )
        tri.onclick(baba, 1)

    demo1()
    demo2()
    exitonclick()
|
from collections import Iterable
from . import ResourcesQueryBuilder
from ...models.resource import Resource
from ...models.employee import Employee
from ...models.position import Position
from ...models.appointment import Appointment
from ..bl.structures import query_recursive_tree
class AppointmentsQueryBuilder(ResourcesQueryBuilder):
    """Query builder for the appointments listing.

    Joins Appointment with Employee, Position and the recursive
    structures tree so that grid columns and free-text search can use
    them.
    """

    def __init__(self, context):
        super(AppointmentsQueryBuilder, self).__init__(context)
        # Recursive subquery over the structures tree; exposes columns
        # `name` and `name_path` (used below).
        self._subq_structures_recursive = query_recursive_tree().subquery()
        # Grid columns: result key -> SQLAlchemy expression.
        self._fields = {
            'id': Appointment.id,
            '_id': Appointment.id,
            'date': Appointment.date,
            'employee_name': Employee.name,
            'position_name': Position.name,
            'structure_path': self._subq_structures_recursive.c.name_path
        }
        # Columns matched by the free-text "simple search".
        self._simple_search_fields = [
            Employee.first_name,
            Employee.last_name,
            Position.name,
            self._subq_structures_recursive.c.name,
        ]
        self.build_query()

    def build_query(self):
        """Compose the base resources query with the appointment joins."""
        self.build_base_query()
        self.query = (
            self.query
            .join(Appointment, Resource.appointment)
            .join(Position, Appointment.position)
            .join(Employee, Appointment.employee)
            .join(
                self._subq_structures_recursive,
                self._subq_structures_recursive.c.id == Position.structure_id
            )
        )
        super(AppointmentsQueryBuilder, self).build_query()

    def filter_id(self, id):
        """Restrict the query to the given iterable of appointment ids.

        An empty iterable leaves the query unfiltered.
        """
        # NOTE(review): `Iterable` is imported from `collections` at the top
        # of this file; it moved to `collections.abc` and is removed from
        # `collections` in Python 3.10 — migrate the import.
        assert isinstance(id, Iterable), u"Must be iterable object"
        if id:
            self.query = self.query.filter(Appointment.id.in_(id))
|
from datetime import datetime
from richenum import OrderedRichEnum, OrderedRichEnumValue
from ..common.rainbow import rainbow
from ..common.settings import CONFIG
class LoggingLevel(OrderedRichEnum):
    """Ordered log severities; the index defines the ordering used for
    threshold comparisons in Logger.log."""
    DEBUG = OrderedRichEnumValue(index=1, canonical_name='DEBUG', display_name='DEBUG')
    INFO = OrderedRichEnumValue(index=2, canonical_name='INFO', display_name='INFO')
    WARNING = OrderedRichEnumValue(index=3, canonical_name='WARNING', display_name='WARNING')
    ERROR = OrderedRichEnumValue(index=4, canonical_name='ERROR', display_name='ERROR')
    FATAL = OrderedRichEnumValue(index=5, canonical_name='FATAL', display_name='FATAL')
# Maps a level's display name to the `rainbow` attribute used to colorize
# the level tag in Logger.log.
colors = {
    "DEBUG": "cyan",
    "INFO": "blue",
    "WARNING": "magenta",
    "ERROR": "bred",
    "FATAL": "red",
}
class Logger:
    """Minimal console logger gated by CONFIG.LOG_LEVEL."""

    @staticmethod
    def log(*msgs, level=LoggingLevel.INFO):
        """Print msgs with a colored, timestamped level tag when `level`
        reaches the configured threshold."""
        threshold = getattr(LoggingLevel, CONFIG.LOG_LEVEL.upper())
        if level < threshold:
            return
        colorize = getattr(rainbow, colors[level.display_name])
        tag = "[{level}] {dt}".format(
            level=colorize(level.display_name),
            dt=datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
        )
        print(tag, *msgs)

    @classmethod
    def debug(cls, *msgs):
        """Log at DEBUG severity."""
        return cls.log(*msgs, level=LoggingLevel.DEBUG)

    @classmethod
    def info(cls, *msgs):
        """Log at INFO severity."""
        return cls.log(*msgs, level=LoggingLevel.INFO)

    @classmethod
    def warn(cls, *msgs):
        """Log at WARNING severity."""
        return cls.log(*msgs, level=LoggingLevel.WARNING)

    @classmethod
    def error(cls, *msgs):
        """Log at ERROR severity."""
        return cls.log(*msgs, level=LoggingLevel.ERROR)

    @classmethod
    def fatal(cls, *msgs):
        """Log at FATAL severity."""
        return cls.log(*msgs, level=LoggingLevel.FATAL)
|
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
    'metadata_version': '1.1',
    'status': ['preview'],
    'supported_by': 'community'
}

# Fix: corrected typo "aboyt" -> "about" in the module description.
DOCUMENTATION = '''
---
module: vmware_guest_boot_facts
short_description: Gather facts about boot options for the given virtual machine
description:
    - This module can be used to gather facts about boot options for the given virtual machine.
version_added: 2.7
author:
    - Abhijeet Kasurde (@Akasurde) <akasurde@redhat.com>
notes:
    - Tested on vSphere 6.5
requirements:
    - "python >= 2.6"
    - PyVmomi
options:
   name:
     description:
     - Name of the VM to work with.
     - This is required if C(uuid) parameter is not supplied.
   uuid:
     description:
     - UUID of the instance to manage if known, this is VMware's BIOS UUID by default.
     - This is required if C(name) parameter is not supplied.
   use_instance_uuid:
     description:
     - Whether to use the VMware instance UUID rather than the BIOS UUID.
     default: no
     type: bool
     version_added: '2.8'
   name_match:
     description:
     - If multiple virtual machines matching the name, use the first or last found.
     default: 'first'
     choices: ['first', 'last']
extends_documentation_fragment: vmware.documentation
'''
EXAMPLES = r'''
- name: Gather facts about virtual machine's boot order and related parameters
vmware_guest_boot_facts:
hostname: "{{ vcenter_hostname }}"
username: "{{ vcenter_username }}"
password: "{{ vcenter_password }}"
validate_certs: no
name: "{{ vm_name }}"
register: vm_boot_order_facts
'''
RETURN = r"""
vm_boot_facts:
description: metadata about boot order of virtual machine
returned: always
type: dict
sample: {
"current_boot_order": [
"floppy",
"disk",
"ethernet",
"cdrom"
],
"current_boot_delay": 2000,
"current_boot_retry_delay": 22300,
"current_boot_retry_enabled": true,
"current_enter_bios_setup": true,
"current_boot_firmware": "bios",
"current_secure_boot_enabled": false,
}
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.vmware import PyVmomi, vmware_argument_spec, find_vm_by_id
try:
from pyVmomi import vim
except ImportError:
pass
class VmBootFactsManager(PyVmomi):
    """Locates one VM by name or UUID and reports its boot-option facts."""

    def __init__(self, module):
        super(VmBootFactsManager, self).__init__(module)
        self.name = self.params['name']
        self.uuid = self.params['uuid']
        self.use_instance_uuid = self.params['use_instance_uuid']
        # Resolved by _get_vm().
        self.vm = None

    def _get_vm(self):
        """Resolve self.vm from uuid or name; fail the module if not found."""
        vms = []

        if self.uuid:
            if self.use_instance_uuid:
                # NOTE(review): confirm find_vm_by_id accepts
                # "use_instance_uuid" as a vm_id_type token (other callers
                # pass "instance_uuid") — verify against module_utils.vmware.
                vm_obj = find_vm_by_id(self.content, vm_id=self.uuid, vm_id_type="use_instance_uuid")
            else:
                vm_obj = find_vm_by_id(self.content, vm_id=self.uuid, vm_id_type="uuid")
            if vm_obj is None:
                self.module.fail_json(msg="Failed to find the virtual machine with UUID : %s" % self.uuid)
            vms = [vm_obj]
        elif self.name:
            # Name lookup can match several VMs; name_match selects one below.
            objects = self.get_managed_objects_properties(vim_type=vim.VirtualMachine, properties=['name'])
            for temp_vm_object in objects:
                if temp_vm_object.obj.name == self.name:
                    vms.append(temp_vm_object.obj)

        if vms:
            if self.params.get('name_match') == 'first':
                self.vm = vms[0]
            elif self.params.get('name_match') == 'last':
                self.vm = vms[-1]
        else:
            self.module.fail_json(msg="Failed to find virtual machine using %s" % (self.name or self.uuid))

    @staticmethod
    def humanize_boot_order(boot_order):
        """Map vim BootableDevice objects to short device-name strings."""
        results = []
        for device in boot_order:
            if isinstance(device, vim.vm.BootOptions.BootableCdromDevice):
                results.append('cdrom')
            elif isinstance(device, vim.vm.BootOptions.BootableDiskDevice):
                results.append('disk')
            elif isinstance(device, vim.vm.BootOptions.BootableEthernetDevice):
                results.append('ethernet')
            elif isinstance(device, vim.vm.BootOptions.BootableFloppyDevice):
                results.append('floppy')
        return results

    def ensure(self):
        """Gather the facts and exit the module (never reports a change)."""
        self._get_vm()
        results = dict()
        if self.vm and self.vm.config:
            results = dict(
                current_boot_order=self.humanize_boot_order(self.vm.config.bootOptions.bootOrder),
                current_boot_delay=self.vm.config.bootOptions.bootDelay,
                current_enter_bios_setup=self.vm.config.bootOptions.enterBIOSSetup,
                current_boot_retry_enabled=self.vm.config.bootOptions.bootRetryEnabled,
                current_boot_retry_delay=self.vm.config.bootOptions.bootRetryDelay,
                current_boot_firmware=self.vm.config.firmware,
                current_secure_boot_enabled=self.vm.config.bootOptions.efiSecureBootEnabled
            )
        self.module.exit_json(changed=False, vm_boot_facts=results)
def main():
    """Module entry point: build the argument spec and run the manager."""
    argument_spec = vmware_argument_spec()
    argument_spec.update(
        name=dict(type='str'),
        uuid=dict(type='str'),
        use_instance_uuid=dict(type='bool', default=False),
        name_match=dict(
            choices=['first', 'last'],
            default='first'
        ),
    )

    # Facts-only module: check mode is supported since nothing is changed.
    # Exactly one of name/uuid must be provided.
    module = AnsibleModule(
        argument_spec=argument_spec,
        required_one_of=[
            ['name', 'uuid']
        ],
        mutually_exclusive=[
            ['name', 'uuid']
        ],
        supports_check_mode=True,
    )

    pyv = VmBootFactsManager(module)
    pyv.ensure()


if __name__ == '__main__':
    main()
|
"""
Django settings for djangoStarter project.
Generated by 'django-admin startproject' using Django 1.10.2.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
# Project root: two directories above this settings file.
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# SECURITY WARNING: this key is hard-coded in source control — rotate it
# and load it from the environment before any production deployment.
SECRET_KEY = 'ulu)a&x$#pjc)vxjd&sn&lo_3nd9er$b!fjtb#ky=btjnid25q'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

ALLOWED_HOSTS = []

# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
]

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'djangoStarter.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'djangoStarter.wsgi.application'

# Database: default SQLite file alongside the project.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}

# Password validation
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]

# Internationalization
LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'UTC'

USE_I18N = True

USE_L10N = True

USE_TZ = True

# Static files (CSS, JavaScript, Images)
STATIC_URL = '/static/'
|
from django.db import models
from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy as _
from oioioi.base.utils.deps import check_django_app_dependencies
from oioioi.base.utils import generate_key
from oioioi.contests.models import Contest
check_django_app_dependencies(__name__, ['oioioi.participants'])
class Teacher(models.Model):
    """A teacher account tied one-to-one to an auth user."""
    # The related user doubles as the primary key: one Teacher per user.
    user = models.OneToOneField(User, primary_key=True, verbose_name=_("user"))
    # False until the teacher account is activated — TODO confirm workflow.
    is_active = models.BooleanField(default=False, verbose_name=_("active"))
    school = models.CharField(max_length=255, verbose_name=_("school"))

    class Meta(object):
        permissions = (
            ('teacher', _("Is a teacher")),
        )

    def __unicode__(self):
        # Python 2 string representation (codebase uses __unicode__).
        return unicode(self.user)
class ContestTeacher(models.Model):
    """Link table attaching teachers to contests."""
    contest = models.ForeignKey(Contest)
    teacher = models.ForeignKey(Teacher)

    class Meta(object):
        # A teacher can be attached to a given contest only once.
        unique_together = ('contest', 'teacher')

    def __unicode__(self):
        return u'%s/%s' % (self.contest_id, self.teacher.user)
class RegistrationConfig(models.Model):
    """Per-contest registration switches and keys for pupils/teachers."""
    contest = models.OneToOneField(Contest, primary_key=True)
    is_active_pupil = models.BooleanField(default=True)
    is_active_teacher = models.BooleanField(default=True)
    pupil_key = models.CharField(max_length=40)
    teacher_key = models.CharField(max_length=40)

    def __init__(self, *args, **kwargs):
        super(RegistrationConfig, self).__init__(*args, **kwargs)
        # Auto-generate any missing key so new instances always carry
        # usable registration keys (runs on every instantiation).
        if not self.teacher_key:
            self.teacher_key = generate_key()
        if not self.pupil_key:
            self.pupil_key = generate_key()
|
import logging
import sys
from snapcraft.internal.indicators import is_dumb_terminal
class _StdoutFilter(logging.Filter):
def filter(self, record):
return record.levelno <= logging.INFO
class _StderrFilter(logging.Filter):
def filter(self, record):
return record.levelno >= logging.WARNING
class _ColoredFormatter(logging.Formatter):
RESET = "\033[0m"
LEVEL_COLORS = {
"INFO": "\033[0;32m", # Green
"WARNING": "\033[1;33m", # Yellow
"ERROR": "\033[0;31m", # Dark red
"CRITICAL": "\033[1;31m", # Light red
}
def format(self, record):
color = self.LEVEL_COLORS.get(record.levelname, None)
log_message = super().format(record)
if color:
return "{color}{message}{reset}".format(
color=color, message=log_message, reset=self.RESET
)
return log_message
def configure(logger_name=None, log_level=None):
    """Wire up split stdout/stderr logging on the named logger.

    INFO and below go to stdout, WARNING and above to stderr; output is
    colorized unless the terminal is dumb. log_level defaults to INFO.
    """
    log_level = log_level or logging.INFO

    out_handler = logging.StreamHandler(stream=sys.stdout)
    out_handler.addFilter(_StdoutFilter())
    err_handler = logging.StreamHandler(stream=sys.stderr)
    err_handler.addFilter(_StderrFilter())

    formatter_cls = logging.Formatter if is_dumb_terminal() else _ColoredFormatter
    formatter = formatter_cls(style="{")

    logger = logging.getLogger(logger_name)
    for handler in (out_handler, err_handler):
        handler.setFormatter(formatter)
        logger.addHandler(handler)
    logger.setLevel(log_level)

    # INFO by default for the requests lib as it is too noisy
    requests_level = log_level if log_level == logging.DEBUG else logging.WARNING
    logging.getLogger("requests").setLevel(requests_level)
    logging.getLogger("gnupg").setLevel(logging.WARNING)
|
""" DB is a base class for multiple DIRAC databases that are based on MySQL.
It uniforms the way the database objects are constructed
"""
__RCSID__ = "$Id$"
from DIRAC import gLogger, gConfig
from DIRAC.Core.Utilities.MySQL import MySQL
from DIRAC.ConfigurationSystem.Client.Utilities import getDBParameters
from DIRAC.ConfigurationSystem.Client.PathFinder import getDatabaseSection
class DB( MySQL ):
    """Base class for MySQL-backed DIRAC databases.

    Resolves connection parameters from the configuration service and
    opens the connection on construction.
    """

    def __init__( self, dbname, fullname, debug = False ):
        """
        :param dbname: short database name, used for the sub-logger
        :param fullname: full System/DBName path used to look up parameters
        :param debug: passed through to the MySQL layer
        :raises Exception: if the database parameters cannot be resolved
        :raises RuntimeError: if the connection cannot be established
        """
        self.fullname = fullname
        database_name = dbname
        self.log = gLogger.getSubLogger( database_name )

        result = getDBParameters( fullname )
        if not result['OK'] :
            raise Exception( 'Cannot get database parameters: %s' % result['Message'] )

        dbParameters = result[ 'Value' ]
        self.dbHost = dbParameters[ 'Host' ]
        self.dbPort = dbParameters[ 'Port' ]
        self.dbUser = dbParameters[ 'User' ]
        self.dbPass = dbParameters[ 'Password' ]
        self.dbName = dbParameters[ 'DBName' ]

        MySQL.__init__( self, self.dbHost, self.dbUser, self.dbPass,
                        self.dbName, self.dbPort, debug = debug )

        if not self._connected:
            raise RuntimeError( 'Can not connect to DB %s, exiting...' % self.dbName )

        # Log a connection summary (the password line is deliberately
        # commented out).
        self.log.info( "==================================================" )
        #self.log.info("SystemInstance: "+self.system)
        self.log.info( "User:           " + self.dbUser )
        self.log.info( "Host:           " + self.dbHost )
        self.log.info( "Port:           " + str( self.dbPort ) )
        #self.log.info("Password: "+self.dbPass)
        self.log.info( "DBName:         " + self.dbName )
        self.log.info( "==================================================" )

    def getCSOption( self, optionName, defaultValue = None ):
        """Read an option from this database's configuration section."""
        cs_path = getDatabaseSection( self.fullname )
        return gConfig.getValue( "/%s/%s" % ( cs_path, optionName ), defaultValue )
|
class FB_Prod2Prog:
    """Value object for one PROD2PROG registration row.

    Fields can be accessed either through the named accessors or through
    setProd2Prog/getProd2Prog with a 1-based column index:
        1 PROD2PROG_ID            2 PRODUCT_ID
        3 PROGRAM_ID              4 PROD2PROG_STATUS_CODE
        5 REGISTRATION_NUMBER     6 REGISTRATION_YEAR
        7 ORIGINAL_REGISTRATION_NUMBER
        8 ORIGINAL_REGISTRATION_YEAR
        9 REGISTRATION_TS
    """

    # Class-level defaults (name-mangled to _FB_Prod2Prog__*).
    __Prod2Prog_ID = ""           # PROD2PROG_ID
    __Product_ID = ""             # PRODUCT_ID
    __Program_ID = ""             # PROGRAM_ID
    __Prod2Prog_Status_Code = ""  # PROD2PROG_STATUS_CODE
    __Reg_Number = ""             # REGISTRATION_NUMBER
    __Reg_Year = ""               # REGISTRATION_YEAR
    __Orig_Reg_Number = ""        # ORIGINAL_REGISTRATION_NUMBER
    __Orig_Reg_Year = ""          # ORIGINAL_REGISTRATION_YEAR
    __Reg_TS = ""                 # REGISTRATION_TS

    # 1-based column index -> (mangled) attribute name.  Replaces the two
    # nine-branch if/elif chains of the original implementation.
    _INDEX_TO_ATTR = {
        1: '_FB_Prod2Prog__Prod2Prog_ID',
        2: '_FB_Prod2Prog__Product_ID',
        3: '_FB_Prod2Prog__Program_ID',
        4: '_FB_Prod2Prog__Prod2Prog_Status_Code',
        5: '_FB_Prod2Prog__Reg_Number',
        6: '_FB_Prod2Prog__Reg_Year',
        7: '_FB_Prod2Prog__Orig_Reg_Number',
        8: '_FB_Prod2Prog__Orig_Reg_Year',
        9: '_FB_Prod2Prog__Reg_TS',
    }

    def __init__(self):
        pass

    def setProd2Prog(self, Index, Value):
        """Set the field with 1-based Index; unknown indexes are ignored
        (same silent no-op behavior as before)."""
        attr = self._INDEX_TO_ATTR.get(Index)
        if attr is not None:
            setattr(self, attr, Value)

    def getProd2Prog(self, Index):
        """Return the field with 1-based Index, or None for unknown
        indexes (same behavior as before)."""
        attr = self._INDEX_TO_ATTR.get(Index)
        if attr is not None:
            return getattr(self, attr)
        return None

    #Handling PROD2PROG_ID
    def setProd2ProgID(self, PP_ID):
        self.__Prod2Prog_ID = PP_ID

    def getProd2ProgID(self):
        return self.__Prod2Prog_ID

    #Handling Product ID
    def setProductID(self, P_ID):
        self.__Product_ID = P_ID

    def getProductID(self):
        return self.__Product_ID

    #Handling PROGRAM_ID
    def setProgramID(self, Pr_ID):
        self.__Program_ID = Pr_ID

    def getProgramID(self):
        return self.__Program_ID

    #Handling PROD2PROG_STATUS_CODE
    def setProd2ProgStatusCode(self, S_Code):
        self.__Prod2Prog_Status_Code = S_Code

    def getProd2ProgStatusCode(self):
        return self.__Prod2Prog_Status_Code

    #Handling REGISTRATION_NUMBER
    def setRegNumber(self, R_Number):
        self.__Reg_Number = R_Number

    def getRegNumber(self):
        return self.__Reg_Number

    #Handling REGISTRATION_YEAR
    def setRegYear(self, R_Year):
        self.__Reg_Year = R_Year

    def getRegYear(self):
        return self.__Reg_Year

    #Handling ORIGINAL_REGISTRATION_NUMBER
    def setOrigRegNumber(self, Orig_RegNumber):
        self.__Orig_Reg_Number = Orig_RegNumber

    def getOrigRegNumber(self):
        return self.__Orig_Reg_Number

    #Handling ORIGINAL_REGISTRATION_YEAR
    def setOrigRegYear(self, Orig_RegYear):
        self.__Orig_Reg_Year = Orig_RegYear

    def getOrigRegYear(self):
        return self.__Orig_Reg_Year

    #Handling REGISTRATION_TS
    def setRegTS(self, RegTS):
        self.__Reg_TS = RegTS

    def getRegTS(self):
        return self.__Reg_TS
|
import re
import etl_plugin_core
from numerizer import numerize
class enhance_extract_money(etl_plugin_core.Plugin):
    """Extract monetary amounts (number + currency sign/label) from the
    document text into the `money_ss` field."""

    # todo: all other currency signs from Wikidata
    currency_signs = ['$', '€']

    def process(self, parameters=None, data=None):
        """Scan the text for number/currency pairs (both orders) and add
        every match to data['money_ss'].

        Returns the (parameters, data) pair, as required by the plugin
        pipeline.
        """
        if parameters is None:
            parameters = {}
        if data is None:
            data = {}

        moneys = set(data.get('money_ss', []))

        text = etl_plugin_core.get_text(data)
        text = text.replace("\n", " ")

        # convert written numbers like "one" and "two million" to integer
        # like "1" and "2000000"
        if 'language_s' in data:
            if data['language_s'] == "en":
                text = numerize(text)

        # currency signs
        currencies_escaped = [re.escape(currency)
                              for currency in self.currency_signs]

        # currency labels
        matched_currency_labels = etl_plugin_core.get_all_matchtexts(data.get('currency_ss_matchtext_ss', []))
        for currency_id in matched_currency_labels:
            # get only matchtext (without ID/URI of matching entity)
            for matchtext in matched_currency_labels[currency_id]:
                currencies_escaped.append(re.escape(matchtext))

        # Fix: raw strings — '\d'/'\s' in plain literals are invalid escape
        # sequences (SyntaxWarning on modern Python).
        regex_part_number = r'\d+((\.|\,)\d+)*'
        regex_part_currencies = '(' + '|'.join(currencies_escaped) + ')'

        # match "12.50 €" and "€ 12.50" alike
        for rule in (regex_part_number + r'\s?' + regex_part_currencies,
                     regex_part_currencies + r'\s?' + regex_part_number):
            for match in re.finditer(rule, text, re.IGNORECASE):
                moneys.add(match.group(0))

        data['money_ss'] = list(moneys)

        return parameters, data
|
"""
Created on Fri Mar 16 20:30:04 2018
@author: chrisstrods
"""
import pandas as pd
from os.path import dirname, abspath
try:
import shared_functions as f
except ModuleNotFoundError:
from etl import shared_functions as f
def main():
#load files
d = dirname(dirname(abspath(__file__)))
summaries = pd.read_csv(d+"/staging/match_summaries.csv")
# --- Staging-file loads ---------------------------------------------------
# Continuation of a larger ETL script: `d` (data directory), `summaries`
# (match summaries DataFrame) and the helper module `f` are defined earlier
# in the file -- TODO confirm against the preceding section.
player_stats = pd.read_csv(d+"/staging/player_stats.csv",low_memory=False)
odds = pd.read_csv(d+"/staging/odds_data.csv")
fantasy = pd.read_csv(d+"/staging/fantasy_scores.csv")
adv_stats = pd.read_csv(d+"/staging/adv_stats.csv")
quarters = pd.read_csv(d+"/staging/q_lengths.csv")
progression = pd.read_csv(d+"/staging/scoring_progression.csv")
#Drop any records which have been scraped twice
summaries.drop_duplicates(inplace=True)
player_stats.drop_duplicates(inplace=True)
odds.drop_duplicates(inplace=True)
fantasy.drop_duplicates(inplace=True)
adv_stats.drop_duplicates(inplace=True)
quarters.drop_duplicates(inplace=True)
progression.drop_duplicates(inplace=True)
#######
#
# PROCESS PLAYER STATS AND MATCH SUMMARIES
#
#######
#Generate columns for odds file
odds["hometeam"] = odds.apply(f.nameFormat, col="hometeam", axis=1)
odds["awayteam"] = odds.apply(f.nameFormat, col="awayteam", axis=1)
odds["year"] = odds.apply(f.getYear, axis=1)
odds["round"] = odds.apply(f.fixFinalsRounds,axis=1)
odds["matchid"] = odds.apply(f.getMatchID, axis=1)
#Summaries update for GF replays
summaries["matchid"] = summaries.apply(f.replayFix,axis=1)
summaries["round"] = summaries.apply(f.roundFix,axis=1)
#Merge fantasy with odds to get match details
fantasy = pd.merge(fantasy,odds,how="left",on="gameID")
#fantasy.drop(["gameID"], axis=1, inplace=True)
#Generate merge columns to get join key for fantasy file
fantasy["fullname"] = fantasy.apply(f.nameClean,axis=1)
fantasy["namekey"] = fantasy.apply(f.getNameKeyFW,axis=1)
fantasy["fullkey"] = fantasy.apply(f.getFullKey,axis=1)
# Short names are the join key between the fantasy and advanced-stats files.
adv_stats["shortname"] = adv_stats.apply(f.shortName,axis=1)
fantasy["shortname"] = fantasy.apply(f.shortName,axis=1)
# Inner join: only players present in both fantasy and advanced stats survive.
fw_data = fantasy.merge(adv_stats,on=["gameID","shortname"],how='inner')
fw_data.drop(["round","date","time","homeodds","homeline","awayodds", \
    "awayline","hometeam","awayteam","year","homeAway_y","name_y"], \
    axis=1,inplace=True)
fw_data.rename(columns={'name_x':'name',
    'homeAway_x':'homeAway'},inplace=True)
#Generate merge columns to get join key for player stats file
player_stats["namekey"] = player_stats.apply(f.getNameKeyAT,axis=1)
player_stats["fullkey"] = player_stats.apply(f.getFullKey,axis=1)
#Make manual adjustments for discrepancies
player_stats["fullkey"] = player_stats.apply(f.fixFullName,axis=1)
#Join match summaries with odds file to get all match data
full_summaries = pd.merge(summaries,odds,how="left",on="matchid")
#Join player stats with fantasy file and advanced stats file to get all player data
#player_temp = pd.merge(player_stats,fantasy,how="left",on="fullkey")
player_full = pd.merge(player_stats,fw_data,how="left",on="fullkey")
#Rename columns in full player file and remove uneeded ones
player_full.rename(columns={'matchid_x':'matchid', \
    'kicks_x':'kicks',\
    'homeAway_x':'homeAway',\
    'name_x':'name',\
    'fullname_x':'fullname',\
    'namekey_x':'namekey'},inplace=True)
player_full.drop(["namekey_y","matchid_y","kicks_y"], \
    axis=1,inplace=True)
#Rename columns in match summary file and remove uneeded ones
full_summaries.rename(columns={'round_x':'round', \
    'date_x':'date',\
    'time_x':'time'},inplace=True)
full_summaries.drop(["round_y","date_y","time_y","hometeam","awayteam",\
    "year"], \
    axis=1,inplace=True)
# Sort columns alphabetically for a stable output layout.
player_full = player_full.reindex(sorted(player_full.columns), axis=1)
#Turn stat columns into integers
# NOTE(review): 'goal_assists' appears twice on the left-hand side (mapped
# from both 'GA' and 'goal_assists' on the right) -- verify this is intended.
player_full[['AFLfantasy','centre_clearances','disposal_efficiency',\
    'effective_disposals','goal_assists','intercepts',\
    'metres_gained','stoppage_clearances',\
    'score_involvements','Supercoach',\
    'tackles_in_50','turnovers',\
    'behinds','bounces','brownlow',\
    'clangers','clearances','contested_marks',\
    'contested_poss','disposals','frees_against',\
    'frees_for','goal_assists','goals','handballs',\
    'hitouts','inside50','kicks',\
    'marks','marks_in_50','number','one_percenters',\
    'rebound50','tackles','tog','uncontested_poss']]=\
player_full[['AFLfantasy','CCL','DE','ED','GA','ITC',\
    'MG','SCL','SI','Supercoach','T5','TO',\
    'behinds','bounces','brownlow',\
    'clangers','clearances','contested_marks',\
    'contested_poss','disposals','frees_against',\
    'frees_for','goal_assists','goals','handballs',\
    'hitouts','inside50','kicks',\
    'marks','marks_in_50','number','one_percenters',\
    'rebound50','tackles','tog',\
    'uncontested_poss']].apply(pd.to_numeric,errors='coerce')
# Drop the now-renamed abbreviation columns and other unused fields.
player_full.drop(['BO','CCL','CM','CP','DE','ED','GA','ITC','MG',\
    'MI5','P1','SCL','SI','T5','TO',\
    'UP',\
    'gameID','ha',\
    'name'],axis=1,inplace=True)
#Drop any duplicate games
full_summaries.drop_duplicates(subset="matchid",inplace=True)
player_full.drop_duplicates(subset="fullkey",inplace=True)
#Convert blank number fields to zeroes
# NOTE(review): this blank->0 fix is superseded by the to_numeric/fillna(0)
# call a few lines below; it is kept byte-identical here.
full_summaries['crowd'] = full_summaries['crowd'].apply(
    lambda x: 0 if x == "" else x)
#Remove trailing spaces on names
player_full["first_name"] = player_full["first_name"].str.strip()
full_summaries["umpire1"] = full_summaries["umpire1"].str.strip()
full_summaries["umpire2"] = full_summaries["umpire2"].str.strip()
full_summaries["umpire3"] = full_summaries["umpire3"].str.strip()
#Create final scores and make them ints
# Quarter-4 score strings look like "goals.behinds.total"; index 2 is the total.
full_summaries["hscore"] = full_summaries.apply(lambda row: row["hteam_q4"].split(".")[2],axis=1)
full_summaries["ascore"] = full_summaries.apply(lambda row: row["ateam_q4"].split(".")[2],axis=1)
full_summaries ["hscore"] = pd.to_numeric(full_summaries["hscore"])
full_summaries ["ascore"] = pd.to_numeric(full_summaries["ascore"])
full_summaries['crowd'] = pd.to_numeric(full_summaries['crowd'], errors='coerce').fillna(0)
#Add season column for player stats
player_full["season"] = player_full.apply(f.fillYear,axis=1)
#player_full.to_sparse()
temp_pf = player_full.fillna("0")
player_full = temp_pf
#######
#
# PROCESS SCORING PROGRESSION AND QUARTER LENGTHS
#
#######
# Strip the "m"/"s" unit suffixes and coerce the time fields to ints.
quarters["minutes"] = quarters.apply(lambda row: int(row["minutes"].replace("m","")),axis=1)
quarters["seconds"] = quarters.apply(lambda row: int(row["seconds"].replace("s","")),axis=1)
progression["minutes"] = progression.apply(lambda row: int(row["minutes"]),axis=1)
progression["seconds"] = progression.apply(lambda row: int(row["seconds"]),axis=1)
progression["quarter"] = progression.apply(lambda row: int(row["quarter"]),axis=1)
#Output to CSV
player_full.to_csv(d+"/bench/players.csv", mode="w", index=False)
full_summaries.to_csv(d+"/bench/matches.csv", mode="w", index=False)
progression.to_csv(d+"/bench/progression.csv", mode="w", index=False)
quarters.to_csv(d+"/bench/quarters.csv", mode="w", index=False)
|
import os
import json
import Orange
# Datasets fetched by URL in addition to the local .tab files scanned in
# __main__. Each entry is a (name, location) pair passed to data_info().
external_datasets = [
    ("iris_url", "https://raw.githubusercontent.com/biolab/orange3/master/Orange/datasets/iris.tab"),
]
def data_info(name, location):
    """Load a dataset with Orange and summarise its domain.

    Returns a dict describing the dataset named *name* loaded from
    *location* (a file path or URL): row count, per-type feature counts,
    a missing-value flag, and target (class variable) information.
    """
    print(location)
    # Drop Orange's variable caches so repeated loads do not reuse stale
    # variable definitions between datasets.
    Orange.data.Variable._clear_all_caches()
    data = Orange.data.Table(location)
    domain = data.domain
    attributes = domain.attributes
    has_discrete_target = domain.has_discrete_class
    summary = {
        'name': name,
        'location': location,
        'rows': len(data),
        'features': {
            'discrete': sum(var.is_discrete for var in attributes),
            'continuous': sum(var.is_continuous for var in attributes),
            'meta': len(domain.metas),
        },
        'missing': bool(data.has_missing()),
        'target': {
            'type': 'discrete' if has_discrete_target else 'continuous',
            'values': len(domain.class_var.values) if has_discrete_target else None,
        },
    }
    return summary
if __name__ == "__main__":
    # Collect info for the remote datasets first, then every local .tab file
    # in the working directory, and dump it all as JSON.
    info = {}
    for name, location in external_datasets:
        info[name] = data_info(name, location)
    for fname in os.listdir('.'):
        name, ext = os.path.splitext(fname)
        if ext == '.tab' and os.path.isfile(fname):
            info[name] = data_info(name, fname)
    with open('datasets.info', 'w') as f:
        json.dump(info, f, indent=4, sort_keys=True)
|
import sys, os, argparse
from random import randint
sys.path.append(sys.path.append(os.path.dirname(os.path.realpath(__file__)) + "/corelib/"))
import core
def optParse(errorflag):
parser = argparse.ArgumentParser(description="Runs RAxML on a single .fa file or a directory full of .fa files. Dependencies: core, RAxML");
parser.add_argument("-i", dest="input", help="Input. Either a directory containing many FASTA files or a single FASTA file.");
parser.add_argument("-r", dest="raxml_path", help="You can specify the full path to your RAxML executable here. Default: raxml (assumes you either have an alias or it is in your PATH.", default="raxml");
parser.add_argument("-m", dest="raxml_model", help="The DNA or AA model you wish RAxML to use.");
parser.add_argument("-b", dest="bootstrap_reps", help="The number of bootstrap replicates you wish RAxML to run with its rapid bootstrapping algorithm. Default: 0", type=int, default=0);
parser.add_argument("-t", dest="num_threads", help="The number of threads you wish to use for the analysis. Default: 1", type=int, default=1);
parser.add_argument("-v", dest="verbosity", help="An option to control the output printed to the screen. 1: print all RAxML output, 0: print only a progress bar. Default: 1", type=int, default=1);
parser.add_argument("-c", dest="constraint_tree", help="A file containing a constraint tree to be used with RAxML's -g option.");
parser.add_argument("--bl", dest="estimate_bl", help="Use with -c to set RAxML to '-f e' to estimate branch lengths only on the constraint tree", action="store_true");
parser.add_argument("-o", dest="output_dir", help="The name of the output directory for this run. Default: [datetime]-run_raxml", default="");
parser.add_argument("-l", dest="log_opt", help="A boolean option to tell the script whether to create a logfile (1) or not (0). Default: 1", type=int, default=1);
args = parser.parse_args();
if errorflag == 0:
if args.input == None or args.raxml_model == None:
parser.print_help();
sys.exit();
if args.bootstrap_reps < 0:
core.errorOut(1, "-b can take only positive values");
optParse(1);
if args.bootstrap_reps > 100:
print " ---------------------------------------------------------------------------------------------------";
print "|*Warning: You have specified more than 100 bootstrap replicates. This could take a very long time. |";
print " ---------------------------------------------------------------------------------------------------";
if args.num_threads <= 0:
core.errorOut(2, "-t can take only positive, non-zero values");
optParse(1);
if args.verbosity not in [0,1]:
core.errorOut(3, "-v must take values of either 1 or 0");
optParse(1);
if args.constraint_tree != None and not os.path.exists(args.constraint_tree):
core.errorOut(4, "Cannot find constraint tree (-c) file!");
optParse(1);
if args.estimate_bl and args.constraint_tree == None:
core.errorOut(5, "With --bl set, a constraint tree must also be set with -c");
optParse(1);
if args.log_opt not in [0,1]:
core.errorOut(6, "-l mus take values of either 1 or 0");
optParse(1);
return args.input, args.raxml_path, args.raxml_model, args.bootstrap_reps, args.num_threads, args.verbosity, args.constraint_tree, args.estimate_bl, args.output_dir, args.log_opt;
elif errorflag == 1:
parser.print_help();
sys.exit();
# Parse options, then work out the input file list and the output directory
# layout. fileflag == 1 means a single input file, 0 means a directory.
ins, rax_path, model, b, t, v, const_tree, bl_opt, script_outdir, l = optParse(0);
starttime = core.getLogTime();
if script_outdir == "":
	# No -o given: derive a timestamped output directory via core.getOutdir.
	if os.path.isfile(ins):
		fileflag = 1;
		indir = os.path.dirname(os.path.realpath(ins));
		filelist = [os.path.abspath(ins)];
		indir, script_outdir = core.getOutdir(indir, "run_raxml", starttime);
		ins = indir;
	else:
		fileflag = 0;
		indir, script_outdir = core.getOutdir(ins, "run_raxml", starttime);
		filelist = os.listdir(indir);
		ins = indir
else:
	# -o given: if the directory already exists, append/bump a "-N" suffix
	# until an unused name is found.
	counter = 1;
	while os.path.exists(script_outdir):
		if counter == 1:
			script_outdir = script_outdir + "-" + str(counter);
		else:
			script_outdir = script_outdir[:script_outdir.index("-")+1] + str(counter);
		counter += 1;
	if os.path.isfile(ins):
		fileflag = 1;
		filelist = [os.path.abspath(ins)];
	else:
		fileflag = 0;
		filelist = os.listdir(ins);
	ins = os.path.abspath(ins);
script_outdir = os.path.abspath(script_outdir);
# Sub-directories for the best trees and for all other RAxML output.
bestdir = os.path.join(script_outdir, "raxml-best");
outdir = os.path.join(script_outdir, "raxml-out");
print core.getTime() + " | Creating main output directory:\t" + script_outdir;
os.system("mkdir '" + script_outdir +"'");
logfilename = os.path.join(script_outdir, "run_raxml.log");
core.filePrep(logfilename);
core.logCheck(l, logfilename, "=======================================================================");
core.logCheck(l, logfilename, "\t\t\tBuilding trees with RAxML");
core.logCheck(l, logfilename, "\t\t\t" + core.getDateTime());
# Log the run configuration, create the output sub-directories, and open the
# seed record files before the main loop starts.
if fileflag == 1:
	core.logCheck(l, logfilename, "INPUT | Making tree from file:\t\t" + ins);
else:
	core.logCheck(l, logfilename, "INPUT | Making trees from all files in:\t" + ins);
core.logCheck(l, logfilename, "INPUT | RAxML path set to:\t\t\t" + rax_path);
core.logCheck(l, logfilename, "INFO | Using the following DNA or AA model:\t" + model);
if b > 0:
	core.logCheck(l, logfilename, "INFO | Performing " + str(b) + " bootstrap replicates per tree.");
else:
	core.logCheck(l, logfilename, "INFO | Not performing bootstrap analysis.");
if const_tree != None:
	core.logCheck(l, logfilename, "INFO | Using constraint tree in file:" + const_tree);
	# Absolute path so the RAxML call works regardless of the working dir.
	const_tree = os.path.abspath(const_tree);
if t > 1:
	core.logCheck(l, logfilename, "INFO | Using " + str(t) + " threads.");
else:
	core.logCheck(l, logfilename, "INFO | Using 1 thread");
if v == 1:
	core.logCheck(l, logfilename, "INFO | Printing all RAxML output to the screen.");
else:
	core.logCheck(l, logfilename, "INFO | Silent mode. Not printing RAxML output to the screen.");
core.logCheck(l, logfilename, "OUTPUT | An output directory has been created within the input directory called:\t" + script_outdir);
core.logCheck(l, logfilename, "OUTPUT | Best trees will be placed in raxml_best/, all other RAxML output will be placed in raxml_out/");
core.logCheck(l, logfilename, "-------------------------------------");
if not os.path.exists(outdir):
	cmd = "mkdir '" + outdir + "'";
	os.system(cmd);
if not os.path.exists(bestdir):
	cmd = "mkdir '" + bestdir + "'";
	os.system(cmd);
# Record the RAxML parsimony (and bootstrap) seeds used for each input file.
seedfile = open(os.path.join(script_outdir, "raxml-seeds.txt"), "w");
if b > 0:
	bseedfile = open(os.path.join(script_outdir, "raxml-bseeds.txt"), "w");
core.logCheck(l, logfilename, core.getTime() + " | Starting RAxML runs...\n");
if v == 0:
	rax_logfile = os.path.join(script_outdir, "raxml.log");
# Progress-bar state for silent mode, and the collected best trees.
i = 0;
numbars = 0;
donepercent = [];
trees = {};
# Main loop: build one RAxML command per FASTA file, run it, then sort the
# resulting output files into the per-gene and best-tree directories.
for each in filelist:
	# Skip anything that does not look like a FASTA alignment.
	if ".fa" not in each:
		continue;
	if v == 0 and fileflag == 0:
		numbars, donepercent = core.loadingBar(i, len(filelist), donepercent, numbars);
	i += 1;
	if fileflag == 1:
		rax_infile = each;
	else:
		rax_infile = os.path.join(ins, each);
	# Output name: the input file name up to its first ".".
	rax_outfile = os.path.basename(each);
	rax_outfile = rax_outfile[:rax_outfile.index(".")];
	rax_outdir = os.path.join(outdir, rax_outfile + "-raxout/");
	if not os.path.exists(rax_outdir):
		os.system("mkdir '" + rax_outdir + "'");
	##Generate the starting seed and bootstrap seeds (if applicable).
	seed = str(randint(1000000,999999999));
	seedfile.write(each + "\t" + str(seed) +"\n");
	if b > 0:
		boot_seed = str(randint(1000000,999999999));
		bseedfile.write(each + "\t" + str(boot_seed) +"\n");
	##Building the RAxML command based on the input parameters.
	rax_cmd = rax_path + " ";
	if b > 0:
		rax_cmd = rax_cmd + "-f a ";
	rax_cmd = rax_cmd + " -m " + model + " -p " + seed;
	if b > 0:
		rax_cmd = rax_cmd + " -x " + boot_seed + " -# " + str(b);
	if t > 1:
		rax_cmd = rax_cmd + " -T " + str(t);
	if const_tree != None:
		rax_cmd += " -g " + const_tree;
	if bl_opt:
		rax_cmd += " -f e";
	rax_cmd = rax_cmd + " -s '" + rax_infile + "' -n '" + rax_outfile + "' -w '" + script_outdir + "'";
	if v == 0:
		rax_cmd = rax_cmd + " >> " + rax_logfile;
	if v == 1 or fileflag == 1:
		core.logCheck(l, logfilename, core.getTime() + " | RAxML Call:\t" + rax_cmd);
	else:
		lfile = open(logfilename, "a");
		lfile.write(core.getTime() + " | RAxML Call:\t" + rax_cmd + "\n");
		lfile.close();
	os.system(rax_cmd);
	##The RAxML call
	# Move RAxML's output files out of the run directory: best trees to
	# bestdir, everything else to the per-gene output directory.
	newfileList = os.listdir(script_outdir);
	for neweach in newfileList:
		full_file = os.path.join(script_outdir, neweach);
		if neweach.find("RAxML_bestTree") != -1:
			if b == 0:
				trees[rax_outfile] = open(full_file, "r").read();
			mv_cmd = "mv '" + full_file + "' '" + bestdir + "'";
			os.system(mv_cmd);
		elif neweach.find("RAxML") != -1 and neweach != "RAxML_best" and neweach != "raxml_seeds" and neweach != "RAxML_out" and neweach != "raxml_bseeds":
			if b > 0 and "bipartitions." in neweach:
				trees[rax_outfile] = open(full_file, "r").read();
			mv_cmd = "mv '" + full_file + "' '" + rax_outdir + "'";
			os.system(mv_cmd);
	if os.path.exists(rax_infile + ".reduced"):
		# BUGFIX: the closing quote was misplaced ("... + \".reduced '\" ..."),
		# producing the malformed shell command  mv 'FILE.reduced 'DIR' .
		# The quote now closes immediately after ".reduced".
		mv_cmd = "mv '" + rax_infile + ".reduced' '" + rax_outdir + "'";
		os.system(mv_cmd);
if v == 0:
	pstring = "100.0% complete.\n";
	sys.stderr.write('\b' * len(pstring) + pstring);
# Write the collected best trees into the summary files consumed by the
# downstream tools (ASTRAL and SDM).
gtfile = open(os.path.join(script_outdir, "best-trees.txt"), "w");
astralfile = open(os.path.join(script_outdir, "gt-for-astral.txt"), "w");
sdmfile = open(os.path.join(script_outdir, "gt-for-sdm.txt"), "w");
if b > 0:
	astralbsfile = open(os.path.join(script_outdir, "bs-for-astral.txt"), "w");
# The SDM input format starts with the number of trees.
sdmfile.write(str(len(trees)) + "\n");
for tree in trees:
	gtfile.write(tree + "\t" + trees[tree]);
	astralfile.write(trees[tree]);
	sdmfile.write(trees[tree]);
	if b > 0:
		# NOTE(review): per-gene directories are created as "<name>-raxout/"
		# under outdir in the main loop, but this path joins the *last*
		# iteration's rax_outdir with "<name>_raxout" -- this looks wrong;
		# verify against the actual on-disk layout.
		cur_bsfile = os.path.join(rax_outdir, tree + "_raxout", "RAxML_bootstrap." + tree);
		astralbsfile.write(cur_bsfile + "\n");
gtfile.close();
astralfile.close();
sdmfile.close();
if b > 0:
	astralbsfile.close();
core.logCheck(l, logfilename, core.getTime() + " | Done!");
core.logCheck(l, logfilename, "=======================================================================");
|
def getReddeningLaw(law='fitzpatrick99',Rv=3.1,inv=False):
    """Return an interpolation function for the chosen reddening law.

    law -- one of 'ccm89', 'odonnell94', 'calzetti00', 'fitzpatrick99', 'fm07'
    Rv  -- ratio of total to selective extinction (ignored by 'fm07', which
           the extinction package defines only for Rv = 3.1)
    inv -- if True, the returned function takes inverse wavelength in
           Angstroms^-1; if False, it takes wavelength in Angstroms.

    The returned scipy interp1d evaluates R(lambda) = Rv * A(lambda)/A(V)
    over the law's valid range and 0 outside it.
    """
    import numpy as np
    from scipy import interpolate
    import extinction
    # Wavelength ranges (lambda_min - lambda_max) of the various reddening laws
    # (in Angstroms)...
    lambda_min = {'ccm89': 1250.,
                  'odonnell94': 1250.,
                  'calzetti00': 1200.,
                  'fitzpatrick99': 910.,
                  'fm07': 910.}
    lambda_max = {'ccm89': 33000.,
                  'odonnell94': 33000.,
                  'calzetti00': 22000.,
                  'fitzpatrick99': 60000.,
                  'fm07': 60000.}
    # We can extract the list of supported reddening laws by
    # grabbing those that are keys within the lambda_min dictionary...
    supported_laws = lambda_min.keys()
    # If reddening law not in the the list of supported reddening laws,
    # return an Exception...
    # NOTE(review): this returns the Exception *class* instead of raising;
    # callers must compare the return value against Exception explicitly --
    # consider raising ValueError here instead.
    if law not in supported_laws:
        print """Un-supported reddening law: %s""" % (law)
        print 'Supported reddening laws are: ', supported_laws
        print 'Returning exception'
        return Exception
    # Calculate and return the reddening law in either
    # inverse wavelength form (inv=True) or in wavelength
    # form (inv=False)...
    if inv==True:
        # Use inverse microns to call to "extinction" module
        # and return reddening law in inverse Angstroms...
        # Calculate inverse wavelengths...
        x_lambda_min = 1.0e4/lambda_max[law]
        x_lambda_max = 1.0e4/lambda_min[law]
        x_micron = np.linspace(x_lambda_min, x_lambda_max, 2000) # microns^-1
        x_angstrom = x_micron * 1.0e-4 # Convert from microns^-1 to Anstroms^-1
        # Call appropriate reddening law function...
        if law == 'ccm89':
            r_array = Rv*extinction.ccm89(x_micron, 1.0, Rv, unit='invum')
        elif law == 'odonnell94':
            r_array = Rv*extinction.odonnell94(x_micron, 1.0, Rv, unit='invum')
        elif law == 'calzetti00':
            r_array = Rv*extinction.calzetti00(x_micron, 1.0, Rv, unit='invum')
        elif law == 'fitzpatrick99':
            r_array = Rv*extinction.fitzpatrick99(x_micron, 1.0, Rv, unit='invum')
        elif law == 'fm07':
            r_array = Rv*extinction.fm07(x_micron, 1.0, unit='invum')
        # Create interpolation function for reddening law...
        # Cubic spline (kind=3); zero outside the sampled range.
        r = interpolate.interp1d(x_angstrom, r_array,
                                 bounds_error=False, fill_value=0., kind=3)
    else:
        # Use Angstroms to call to "extinction" module
        # and return reddening law in Angstroms...
        # Create wavelength array...
        angstrom = np.logspace(np.log10(lambda_min[law]), np.log10(lambda_max[law]), 2000)
        # Call appropriate reddening law function...
        if law == 'ccm89':
            r_array = Rv*extinction.ccm89(angstrom, 1.0, Rv, unit='aa')
        elif law == 'odonnell94':
            r_array = Rv*extinction.odonnell94(angstrom, 1.0, Rv, unit='aa')
        elif law == 'calzetti00':
            r_array = Rv*extinction.calzetti00(angstrom, 1.0, Rv, unit='aa')
        elif law == 'fitzpatrick99':
            r_array = Rv*extinction.fitzpatrick99(angstrom, 1.0, Rv, unit='aa')
        elif law == 'fm07':
            r_array = Rv*extinction.fm07(angstrom, 1.0, unit='aa')
        # Create interpolation function for reddening law...
        r = interpolate.interp1d(angstrom, r_array,
                                 bounds_error=False, fill_value=0., kind='linear')
    # Return interpolation fucntion...
    return r
def getReddeningOrig():
    """Return an interpolation function for the tabulated Fitzpatrick (1999)
    reddening law.

    The returned callable takes inverse wavelength (Angstroms^-1) and
    evaluates a cubic spline through the table below; outside the tabulated
    range it returns 0.
    """
    import numpy as np
    from scipy import interpolate
    # Fitzpatrick 1999 table: wavelength (Angstroms) vs. reddening value.
    table = [
        (2600, 6.591),
        (2700, 6.265),
        (4110, 4.315),
        (4670, 3.806),
        (5470, 3.055),
        (6000, 2.688),
        (12200, 0.829),
        (26500, 0.265),
        (1000000, 0.0),
    ]
    wavelengths = np.array([w for w, _ in table])
    values = [v for _, v in table]
    # Cubic spline (kind=3) in inverse-wavelength space; zero out of range.
    return interpolate.interp1d(1. / wavelengths, values,
                                bounds_error=False, fill_value=0., kind=3)
|
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
# Build the Cython extension for the message-passing bindings; links against
# the xon client/object C++ libraries.
extensions = [
    Extension(
        'message_passing.message',
        language='c++',
        sources=['message_passing/message.pyx'],
        libraries=['xon-objectxx', 'xon-clientxx'],
    ),
]

setup(
    cmdclass={'build_ext': build_ext},
    ext_modules=extensions,
)
|
from ert.cwrap import BaseCClass, CWrapper
from ert.enkf import ENKF_LIB, NodeId
class BlockObservation(BaseCClass):
    """Python wrapper around a native block_obs object; all accessors delegate
    to C functions bound on the class namespace (see the prototypes below)."""

    def __init__(self):
        raise NotImplementedError("Class can not be instantiated directly!")

    def getCoordinate(self, index):
        """ @rtype: tuple of (int, int, int) """
        namespace = BlockObservation.cNamespace()
        return (namespace.iget_i(self, index),
                namespace.iget_j(self, index),
                namespace.iget_k(self, index))

    def __len__(self):
        """ @rtype: int """
        return BlockObservation.cNamespace().get_size(self)

    def __iter__(self):
        # Yield the observation indices 0 .. len(self) - 1.
        for index in range(len(self)):
            yield index

    def getValue(self, index):
        """ @rtype: float """
        return BlockObservation.cNamespace().get_value(self, index)

    def getStd(self, index):
        """ @rtype: float """
        return BlockObservation.cNamespace().get_std(self, index)

    def getDepth(self, index):
        """ @rtype: float """
        return BlockObservation.cNamespace().get_depth(self, index)

    def getData(self, state, obs_index, node_id):
        """
        @type state: c_void_p
        @type obs_index: int
        @type node_id: NodeId
        @rtype: float """
        return BlockObservation.cNamespace().iget_data(self, state, obs_index, node_id)

    def free(self):
        # Release the underlying native object.
        BlockObservation.cNamespace().free(self)
# Register the wrapper type and bind the C function prototypes. Each
# prototype string maps an exported ENKF_LIB symbol to a typed callable on
# the class namespace; the strings must match the C signatures exactly.
cwrapper = CWrapper(ENKF_LIB)
cwrapper.registerObjectType("block_obs", BlockObservation)
BlockObservation.cNamespace().free = cwrapper.prototype("void block_obs_free( block_obs )")
BlockObservation.cNamespace().iget_i = cwrapper.prototype("int block_obs_iget_i(block_obs, int)")
BlockObservation.cNamespace().iget_j = cwrapper.prototype("int block_obs_iget_j( block_obs, int)")
BlockObservation.cNamespace().iget_k = cwrapper.prototype("int block_obs_iget_k( block_obs , int)")
BlockObservation.cNamespace().get_size = cwrapper.prototype("int block_obs_get_size( block_obs )")
BlockObservation.cNamespace().get_std = cwrapper.prototype("double block_obs_iget_std( block_obs, int )")
BlockObservation.cNamespace().get_value = cwrapper.prototype("double block_obs_iget_value( block_obs, int)")
BlockObservation.cNamespace().get_depth = cwrapper.prototype("double block_obs_iget_depth( block_obs, int)")
BlockObservation.cNamespace().iget_data = cwrapper.prototype("double block_obs_iget_data(block_obs, c_void_p, int, node_id)")
|
from django.shortcuts import render_to_response
from django.views.generic.create_update import *
from django.views.generic.list_detail import *
from django.http import HttpResponseRedirect
from django.contrib.auth.forms import UserCreationForm
from django.forms.models import ModelFormMetaclass
|
from bricolage.experiment import Experiment, Treatment
from pathlib import Path
from bricolage.threshold3 import Parameters, Population, DefaultTarget
def target1(a, b):
    """Boolean-AND fitness target: 1.0 when both inputs are truthy, else 0.0."""
    return 1.0 if (a and b) else 0.0
class MyTreatment(Treatment):
    """Treatment that evolves each replicate for 100 generations against
    the target1 fitness function."""

    def run_replicate(self, replicate, lineage):
        # Attach the default target the first time this lineage is run.
        if len(lineage.targets) == 0:
            lineage.add_target(DefaultTarget(lineage.world, target1))
        generation_limit = 100
        while lineage.generation < generation_limit:
            lineage.next_generation()
# Shared parameter set for the experiment tests below: a deliberately tiny
# network and population so each 100-generation run stays fast.
_params = Parameters(
    cis_count=2,
    reg_channels=1,
    out_channels=1,
    cue_channels=2,
    population_size=100,
    mutation_rate=0.001,
)
def test_exp1(tmpdir):
    """Run a small MyTreatment experiment end-to-end in a temp directory."""
    base_path = Path(str(tmpdir))
    treatments = [MyTreatment("bob", _params, 10)]
    experiment = Experiment(base_path, treatments, seed=1)
    experiment.run()
class CloningTreatment(Treatment):
    """Treatment whose initial population is made of clones of a single
    freshly-created network."""

    def run_replicate(self, replicate, lineage):
        # Attach the default target the first time this lineage is run.
        if len(lineage.targets) == 0:
            lineage.add_target(DefaultTarget(lineage.world, target1))
        while lineage.generation < 100:
            lineage.next_generation()

    def make_initial_population(self, replicate, factory, size):
        population = Population(factory)
        seed_network = factory.create_network()
        # Reset the id counter: network identifiers must match their index in
        # the database, and create_network() above already consumed an id.
        factory.world.next_network_id = 0
        population.fill(seed_network, size)
        return population
def test_exp2(tmpdir):
    """Run a small CloningTreatment experiment end-to-end in a temp directory."""
    base_path = Path(str(tmpdir))
    treatments = [CloningTreatment("bob", _params, 10)]
    experiment = Experiment(base_path, treatments, seed=1)
    experiment.run()
|
import libpylshbox
import numpy as np
import time
# Demo/benchmark for the lshbox ITQ-LSH Python bindings (Python 2 script):
# build an index over random data, query it, then reload the persisted index
# and query again.
print 'prepare test data'
float_mat = np.random.randn(100000, 192)
float_query = float_mat[0]
print ''
print 'Test itqLsh'
print ''
print 'First time, need to constructing index.' # About 7s.
start = time.time()
itq_mat = libpylshbox.itqlsh()
# Build the index and persist it to 'pyitq.lsh'. The trailing integers are
# ITQ-LSH parameters -- presumably table count / code length / training
# settings; confirm against the lshbox documentation.
itq_mat.init_mat(float_mat.tolist(), 'pyitq.lsh', 521, 5, 8, 100, 50)
result = itq_mat.query(float_query.tolist(), 2, 10)
indices, dists = result[0], result[1]
for i in range(len(indices)):
    print indices[i], '\t', dists[i]
print 'Elapsed time is %f seconds.' % (time.time() - start)
print ''
print 'Second time, no need to re-indexing.' # About 3s.
start = time.time()
itq_mat2 = libpylshbox.itqlsh()
# Loading with only the index file reuses the persisted index.
itq_mat2.init_mat(float_mat.tolist(), 'pyitq.lsh')
result = itq_mat2.query(float_query.tolist(), 2, 10)
indices, dists = result[0], result[1]
for i in range(len(indices)):
    print indices[i], '\t', dists[i]
print 'Elapsed time is %f seconds.' % (time.time() - start)
|
import pytest
from django.core import mail
from apps.public.book.models import Book
from apps.public.book.test.factories import BookFactory
@pytest.mark.django_db
class TestBookQueryset:
    """Tests for the custom Book queryset methods."""

    def test_published_sub_book_is_published_if_parent_is_published(self):
        parent = BookFactory(is_published=True)
        BookFactory(parent_book=parent)
        # Both the parent and its sub-book count as published.
        assert Book.objects.all().published().count() == 2

    def test_top_level_filters_all_subbooks(self):
        parent = BookFactory(is_published=True)
        BookFactory(parent_book=parent)
        top_level = Book.objects.all().published().top_level()
        # Only the parent survives the top_level() filter.
        assert top_level.count() == 1
        assert top_level.first().pk == parent.pk
@pytest.mark.django_db
class TestBook:
    """Tests for Book model save-time side effects."""

    @pytest.mark.parametrize('is_published', (True, False))
    def test_email_is_sent_after_book_is_published_or_unpublished(self, is_published):
        book = BookFactory(is_published=is_published)
        # Flip the publication state and persist it.
        book.is_published = not is_published
        book.save()
        # Exactly one notification goes out, to both configured recipients.
        assert len(mail.outbox) == 1
        assert mail.outbox[0].to == ['foo@bar.com', 'foo@baz.com']
|
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """Add the ``summary`` text field to askbot's Activity model.

    Auto-generated South schema migration.  ``models`` below is South's
    frozen snapshot of the ORM at the time this migration was written;
    it is generated data and should not be edited by hand.
    """

    def forwards(self, orm):
        """Apply: create Activity.summary as a TextField defaulting to ''."""
        # Adding field 'Activity.summary'
        db.add_column(u'activity', 'summary', self.gf('django.db.models.fields.TextField')(default=''), keep_default=False)

    def backwards(self, orm):
        """Revert: drop the Activity.summary column again."""
        # Deleting field 'Activity.summary'
        db.delete_column(u'activity', 'summary')

    # Frozen ORM state (South-generated; consumed via the ``orm`` argument).
    models = {
        'askbot.activity': {
            'Meta': {'object_name': 'Activity', 'db_table': "u'activity'"},
            'active_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'activity_type': ('django.db.models.fields.SmallIntegerField', [], {}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_auditted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
            'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Question']", 'null': 'True'}),
            'receiving_users': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'received_activity'", 'symmetrical': 'False', 'to': "orm['auth.User']"}),
            'recipients': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'incoming_activity'", 'symmetrical': 'False', 'through': "orm['askbot.ActivityAuditStatus']", 'to': "orm['auth.User']"}),
            'summary': ('django.db.models.fields.TextField', [], {'default': "''"}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
        },
        'askbot.activityauditstatus': {
            'Meta': {'unique_together': "(('user', 'activity'),)", 'object_name': 'ActivityAuditStatus'},
            'activity': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Activity']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'status': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
        },
        'askbot.anonymousanswer': {
            'Meta': {'object_name': 'AnonymousAnswer'},
            'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'ip_addr': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
            'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'anonymous_answers'", 'to': "orm['askbot.Question']"}),
            'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
            'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
            'text': ('django.db.models.fields.TextField', [], {}),
            'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
        },
        'askbot.anonymousquestion': {
            'Meta': {'object_name': 'AnonymousQuestion'},
            'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'ip_addr': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
            'is_anonymous': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
            'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
            'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
            'text': ('django.db.models.fields.TextField', [], {}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
            'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
        },
        'askbot.answer': {
            'Meta': {'object_name': 'Answer', 'db_table': "u'answer'"},
            'accepted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'accepted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answers'", 'to': "orm['auth.User']"}),
            'comment_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
            'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_answers'", 'null': 'True', 'to': "orm['auth.User']"}),
            'html': ('django.db.models.fields.TextField', [], {'null': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_edited_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'last_edited_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'last_edited_answers'", 'null': 'True', 'to': "orm['auth.User']"}),
            'locked': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'locked_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'locked_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'locked_answers'", 'null': 'True', 'to': "orm['auth.User']"}),
            'offensive_flag_count': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
            'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answers'", 'to': "orm['askbot.Question']"}),
            'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'text': ('django.db.models.fields.TextField', [], {'null': 'True'}),
            'vote_down_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'vote_up_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'wikified_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
        },
        'askbot.answerrevision': {
            'Meta': {'ordering': "('-revision',)", 'object_name': 'AnswerRevision', 'db_table': "u'answer_revision'"},
            'answer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'revisions'", 'to': "orm['askbot.Answer']"}),
            'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answerrevisions'", 'to': "orm['auth.User']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'revised_at': ('django.db.models.fields.DateTimeField', [], {}),
            'revision': ('django.db.models.fields.PositiveIntegerField', [], {}),
            'summary': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'}),
            'text': ('django.db.models.fields.TextField', [], {})
        },
        'askbot.award': {
            'Meta': {'object_name': 'Award', 'db_table': "u'award'"},
            'awarded_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'badge': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'award_badge'", 'to': "orm['askbot.BadgeData']"}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'notified': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'award_user'", 'to': "orm['auth.User']"})
        },
        'askbot.badgedata': {
            'Meta': {'ordering': "('slug',)", 'object_name': 'BadgeData'},
            'awarded_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
            'awarded_to': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'badges'", 'symmetrical': 'False', 'through': "orm['askbot.Award']", 'to': "orm['auth.User']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'})
        },
        'askbot.comment': {
            'Meta': {'ordering': "('-added_at',)", 'object_name': 'Comment', 'db_table': "u'comment'"},
            'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'comment': ('django.db.models.fields.CharField', [], {'max_length': '2048'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'html': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '2048'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
            'offensive_flag_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'comments'", 'to': "orm['auth.User']"})
        },
        'askbot.emailfeedsetting': {
            'Meta': {'object_name': 'EmailFeedSetting'},
            'added_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'feed_type': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
            'frequency': ('django.db.models.fields.CharField', [], {'default': "'n'", 'max_length': '8'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'reported_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
            'subscriber': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'notification_subscriptions'", 'to': "orm['auth.User']"})
        },
        'askbot.favoritequestion': {
            'Meta': {'object_name': 'FavoriteQuestion', 'db_table': "u'favorite_question'"},
            'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Question']"}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_favorite_questions'", 'to': "orm['auth.User']"})
        },
        'askbot.markedtag': {
            'Meta': {'object_name': 'MarkedTag'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'reason': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
            'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_selections'", 'to': "orm['askbot.Tag']"}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'tag_selections'", 'to': "orm['auth.User']"})
        },
        'askbot.question': {
            'Meta': {'object_name': 'Question', 'db_table': "u'question'"},
            'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'answer_accepted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'answer_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
            'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'questions'", 'to': "orm['auth.User']"}),
            'close_reason': ('django.db.models.fields.SmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
            'closed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'closed_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'closed_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'closed_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
            'comment_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
            'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
            'favorited_by': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'favorite_questions'", 'symmetrical': 'False', 'through': "orm['askbot.FavoriteQuestion']", 'to': "orm['auth.User']"}),
            'favourite_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
            'followed_by': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'followed_questions'", 'symmetrical': 'False', 'to': "orm['auth.User']"}),
            'html': ('django.db.models.fields.TextField', [], {'null': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_anonymous': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_activity_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_activity_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'last_active_in_questions'", 'to': "orm['auth.User']"}),
            'last_edited_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'last_edited_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'last_edited_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
            'locked': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'locked_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'locked_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'locked_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
            'offensive_flag_count': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
            'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
            'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
            'tags': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'questions'", 'symmetrical': 'False', 'to': "orm['askbot.Tag']"}),
            'text': ('django.db.models.fields.TextField', [], {'null': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
            'view_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
            'vote_down_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'vote_up_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'wikified_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
        },
        'askbot.questionrevision': {
            'Meta': {'ordering': "('-revision',)", 'object_name': 'QuestionRevision', 'db_table': "u'question_revision'"},
            'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'questionrevisions'", 'to': "orm['auth.User']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_anonymous': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'revisions'", 'to': "orm['askbot.Question']"}),
            'revised_at': ('django.db.models.fields.DateTimeField', [], {}),
            'revision': ('django.db.models.fields.PositiveIntegerField', [], {}),
            'summary': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'}),
            'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
            'text': ('django.db.models.fields.TextField', [], {}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '300'})
        },
        'askbot.questionview': {
            'Meta': {'object_name': 'QuestionView'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'viewed'", 'to': "orm['askbot.Question']"}),
            'when': ('django.db.models.fields.DateTimeField', [], {}),
            'who': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'question_views'", 'to': "orm['auth.User']"})
        },
        'askbot.repute': {
            'Meta': {'object_name': 'Repute', 'db_table': "u'repute'"},
            'comment': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'negative': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
            'positive': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
            'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Question']", 'null': 'True', 'blank': 'True'}),
            'reputation': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
            'reputation_type': ('django.db.models.fields.SmallIntegerField', [], {}),
            'reputed_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
        },
        'askbot.tag': {
            'Meta': {'ordering': "('-used_count', 'name')", 'object_name': 'Tag', 'db_table': "u'tag'"},
            'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_tags'", 'to': "orm['auth.User']"}),
            'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_tags'", 'null': 'True', 'to': "orm['auth.User']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
            'used_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
        },
        'askbot.vote': {
            'Meta': {'unique_together': "(('content_type', 'object_id', 'user'),)", 'object_name': 'Vote', 'db_table': "u'vote'"},
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'votes'", 'to': "orm['auth.User']"}),
            'vote': ('django.db.models.fields.SmallIntegerField', [], {}),
            'voted_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'})
        },
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'about': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'bronze': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
            'consecutive_days_visit_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'country': ('django_countries.fields.CountryField', [], {'max_length': '2', 'blank': 'True'}),
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'display_tag_filter_strategy': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'email_isvalid': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'email_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
            'email_tag_filter_strategy': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'gold': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
            'gravatar': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'has_custom_avatar': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'ignored_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'interesting_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'location': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'new_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'questions_per_page': ('django.db.models.fields.SmallIntegerField', [], {'default': '10'}),
            'real_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'reputation': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
            'seen_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'show_country': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'silver': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
            'status': ('django.db.models.fields.CharField', [], {'default': "'w'", 'max_length': '2'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
            'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        }
    }

    complete_apps = ['askbot']
|
from odoo import api, fields, models
import logging
_logger = logging.getLogger(__name__)
class View(models.Model):
    """Extend ``ir.ui.view`` with an extra view type.

    Registers "Mako" as an additional choice on the ``type`` selection
    field; everything else is inherited from the base view model.
    """

    _inherit = "ir.ui.view"

    # Append the Mako option to the existing view-type selection.
    type = fields.Selection(selection_add=[('mako', 'Mako')])
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.